import os
import random
import warnings

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns

import tensorflow as tf
import keras
from keras import models, layers, regularizers
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, BatchNormalization, Dropout
from keras.optimizers import Adam
from keras.preprocessing.image import ImageDataGenerator
from keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau, CSVLogger, LearningRateScheduler
from tensorflow.keras.losses import SparseCategoricalCrossentropy
from tensorflow.keras.utils import to_categorical
from sklearn.model_selection import KFold

warnings.filterwarnings('ignore')

# Fix every source of randomness for reproducibility
os.environ['PYTHONHASHSEED'] = '1'
keras.utils.set_random_seed(42)
tf.random.set_seed(42)
np.random.seed(42)
random.seed(42)
# Start with uniform class weights (1 for each of the 15 classes);
# these are refined later once the class distribution has been examined.
weights = {key: 1 for key in range(15)}
train_data = tf.keras.utils.image_dataset_from_directory(
    'test/train/',
    labels='inferred',
    label_mode='int',
    class_names=None,
    color_mode='grayscale',
    batch_size=32,
    image_size=(224, 224),
    shuffle=True,
    seed=42,
    validation_split=None,
    subset=None,
    interpolation='bilinear',
    crop_to_aspect_ratio=False,
)
Found 9028 files belonging to 15 classes.
for batch in train_data.take(10):
    print(batch[1][0])
tf.Tensor(0, shape=(), dtype=int32)
tf.Tensor(11, shape=(), dtype=int32)
tf.Tensor(10, shape=(), dtype=int32)
tf.Tensor(6, shape=(), dtype=int32)
tf.Tensor(3, shape=(), dtype=int32)
tf.Tensor(9, shape=(), dtype=int32)
tf.Tensor(3, shape=(), dtype=int32)
tf.Tensor(14, shape=(), dtype=int32)
tf.Tensor(14, shape=(), dtype=int32)
tf.Tensor(5, shape=(), dtype=int32)
validation_data = tf.keras.utils.image_dataset_from_directory(
    'test/validation/',
    labels='inferred',
    label_mode='int',
    class_names=None,
    color_mode='grayscale',
    batch_size=32,
    image_size=(224, 224),
    shuffle=True,
    seed=42,
    validation_split=None,
    subset=None,
    interpolation='bilinear',
    crop_to_aspect_ratio=False,
)
Found 3000 files belonging to 15 classes.
test_data = tf.keras.utils.image_dataset_from_directory(
    'test/test/',
    labels='inferred',
    label_mode='int',
    class_names=None,
    color_mode='grayscale',
    batch_size=32,
    image_size=(224, 224),
    shuffle=True,
    seed=42,
    validation_split=None,
    subset=None,
    interpolation='bilinear',
    crop_to_aspect_ratio=False,
)
Found 3000 files belonging to 15 classes.
Exploratory Data Analysis (EDA) is a critical step in understanding and preparing data for purposes such as machine learning and data-driven decision-making. Performing EDA here deepens our understanding of the dataset and helps us decide how to process it so that it is easier for our model to learn from.
# Sample one image from each split to check that the data is loaded correctly
for batch in train_data.take(1):
    plt.figure()
    plt.imshow(batch[0][0], cmap='gray')
    plt.title(f"Label: {batch[1][0]}")
    plt.show()
for batch in test_data.take(1):
    plt.figure()
    plt.imshow(batch[0][0], cmap='gray')
    plt.title(f"Label: {batch[1][0]}")
    plt.show()
for batch in validation_data.take(1):
    plt.figure()
    plt.imshow(batch[0][0], cmap='gray')
    plt.title(f"Label: {batch[1][0]}")
    plt.show()
def plot_samples_by_label(data):
    # Collect one sample image per label
    samples_by_label = {}
    num_classes = len(data.class_names)
    for batch in data:
        image, label = batch[0][0], batch[1][0].numpy()
        # Keep the first image seen for each label
        if label not in samples_by_label:
            samples_by_label[label] = image
        # Stop once every label has a sample
        if len(samples_by_label) == num_classes:
            break
    # Plot the samples in a 3x5 grid
    fig, axes = plt.subplots(nrows=3, ncols=5, figsize=(15, 8))
    for label, image in samples_by_label.items():
        row = label // 5
        col = label % 5
        ax = axes[row, col]
        ax.imshow(image, cmap='gray')
        ax.axis('off')
        ax.set_title(f"Label: {label}")

# Call the function with the training data
plot_samples_by_label(train_data)
# Count the number of images per class in the training set
class_counts = {}
for images, labels in train_data:
    for label in labels.numpy():
        class_name = train_data.class_names[label]
        if class_name in class_counts:
            class_counts[class_name] += 1
        else:
            class_counts[class_name] = 1

train_class = pd.DataFrame(list(class_counts.items()), columns=['Class', 'count'])
train_class = train_class.sort_values(by=['count'], ascending=False)
train_class.plot(kind='bar', figsize=(10, 5), title="Number of images in each class", x='Class', y='count')
<Axes: title={'center': 'Number of images in each class'}, xlabel='Class'>
Based on the output of the previous cell, we can see that the dataset is not evenly distributed across the labels. This can be problematic for our model, as training on imbalanced data tends to bias it towards the overrepresented labels.
One way to address this issue is to weight the loss contribution of each class according to how often it appears: underrepresented labels receive higher weights and overrepresented labels receive lower weights.
For example, we can use the class_weight parameter in Keras to assign a weight to each class based on its frequency in the dataset, so that the model pays more attention to underrepresented classes during training.
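For instance, with the 9028 training images and 15 classes here, a class containing 300 images (a made-up count, purely for illustration) would receive a weight of about 9028 / (15 × 300) ≈ 2.0, while one containing 900 images would receive about 9028 / (15 × 900) ≈ 0.67; Keras then scales each sample's contribution to the loss by the weight of its class.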
# Convert datasets to NumPy arrays
train_images, train_labels = zip(*[(image, label) for image, label in train_data.as_numpy_iterator()])
validation_images, validation_labels = zip(*[(image, label) for image, label in validation_data.as_numpy_iterator()])
test_images,test_labels= zip(*[(image, label) for image, label in test_data.as_numpy_iterator()])
#train set
X_train = np.concatenate(train_images, axis=0)
y_train = np.concatenate(train_labels, axis=0)
#val
X_val = np.concatenate(validation_images, axis=0)
y_val = np.concatenate(validation_labels, axis=0)
#test
X_test = np.concatenate(test_images, axis=0)
y_test = np.concatenate(test_labels, axis=0)
# Resize images to (31, 31) using TensorFlow
X_train_resized = tf.image.resize(X_train, (31, 31))
X_val_resized = tf.image.resize(X_val, (31, 31))
# Reshape and normalize
X_train31 = X_train_resized.numpy().reshape(X_train_resized.shape[0], 31, 31, 1).astype('float32') / 255
X_val31 = X_val_resized.numpy().reshape(X_val_resized.shape[0], 31, 31, 1).astype('float32') / 255
# One-hot encode the labels for use with categorical cross-entropy
validation_labels = to_categorical(y_val)
train_labels = to_categorical(y_train)
We can first try fitting an extremely simple model to the data to get a baseline understanding of the problem. This initial model serves as a reference point for evaluating the performance of more complex models. The base model will be straightforward and quick to implement, which provides an easy way to capture the underlying patterns in the data.
Simplicity: The base model is intentionally kept simple to establish a basic understanding of the data and the problem at hand.
Quick Implementation: It is designed for rapid implementation, allowing for a fast assessment of initial performance.
Benchmark: The base model serves as a benchmark against which more sophisticated models can be compared. Any subsequent model should outperform this baseline to be considered effective.
The evaluation metrics we will be using are validation accuracy and validation loss; together they let us roughly gauge the model's accuracy (val accuracy) and its rate of error (val loss).
def plot_history(history):
    fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(15, 8))
    # Plot training & validation accuracy values
    ax = axes[1]
    ax.plot(history.history['accuracy'])
    ax.plot(history.history['val_accuracy'])
    ax.set_title('Model accuracy')
    ax.set_ylabel('Accuracy')
    ax.set_xlabel('Epoch')
    ax.legend(['Train', 'Validation'], loc='upper left')
    # Plot training & validation loss values
    ax = axes[0]
    ax.plot(history.history['loss'])
    ax.plot(history.history['val_loss'])
    ax.set_title('Model loss')
    ax.set_ylabel('Loss')
    ax.set_xlabel('Epoch')
    ax.legend(['Train', 'Validation'], loc='upper left')
df31 = pd.DataFrame(columns = ['Model Name', 'Train Accuracy', 'Validation Accuracy', 'Train Loss', 'Validation Loss', 'History'])
model_31 = models.Sequential([
layers.Conv2D(32, (3, 3), activation='relu', input_shape=(31, 31, 1)),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.Flatten(),
layers.Dense(64, activation='relu'),
layers.Dense(15, activation='softmax')
])
# Compile the model
model_31.compile(optimizer='adam',
loss= 'categorical_crossentropy',
metrics=['accuracy'])
# Train the model
history = model_31.fit(X_train31, train_labels, validation_data=(X_val31, validation_labels),
epochs=10, batch_size=32, verbose=2, class_weight = weights)
scores = model_31.evaluate(X_val31, validation_labels, verbose=0)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
df31.loc[len(df31)] = ['Base 31 Model', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
Epoch 1/10
283/283 - 6s - loss: 2.3945 - accuracy: 0.1946 - val_loss: 2.2241 - val_accuracy: 0.3070 - 6s/epoch - 20ms/step Epoch 2/10 283/283 - 1s - loss: 1.8913 - accuracy: 0.3858 - val_loss: 1.9207 - val_accuracy: 0.3587 - 1s/epoch - 4ms/step Epoch 3/10 283/283 - 1s - loss: 1.6249 - accuracy: 0.4708 - val_loss: 1.7104 - val_accuracy: 0.4443 - 1s/epoch - 4ms/step Epoch 4/10 283/283 - 1s - loss: 1.3880 - accuracy: 0.5430 - val_loss: 1.4243 - val_accuracy: 0.5307 - 1s/epoch - 4ms/step Epoch 5/10 283/283 - 1s - loss: 1.2080 - accuracy: 0.6014 - val_loss: 1.2698 - val_accuracy: 0.5850 - 1s/epoch - 4ms/step Epoch 6/10 283/283 - 1s - loss: 1.0624 - accuracy: 0.6492 - val_loss: 1.0915 - val_accuracy: 0.6340 - 1s/epoch - 4ms/step Epoch 7/10 283/283 - 1s - loss: 0.9394 - accuracy: 0.6905 - val_loss: 1.1373 - val_accuracy: 0.6290 - 1s/epoch - 4ms/step Epoch 8/10 283/283 - 1s - loss: 0.8452 - accuracy: 0.7226 - val_loss: 1.1535 - val_accuracy: 0.6383 - 1s/epoch - 4ms/step Epoch 9/10 283/283 - 1s - loss: 0.7634 - accuracy: 0.7529 - val_loss: 1.0070 - val_accuracy: 0.6637 - 1s/epoch - 4ms/step Epoch 10/10 283/283 - 1s - loss: 0.6793 - accuracy: 0.7801 - val_loss: 1.5060 - val_accuracy: 0.5377 - 1s/epoch - 4ms/step Baseline Error: 46.23%
model_31.save('Base31Model.h5')
X_train_resized = tf.image.resize(X_train, (128, 128))
X_val_resized = tf.image.resize(X_val, (128, 128))
# Reshape and normalize
X_train128 = X_train_resized.numpy().reshape(X_train_resized.shape[0], 128, 128, 1).astype('float32') / 255
X_val128 = X_val_resized.numpy().reshape(X_val_resized.shape[0], 128, 128, 1).astype('float32') / 255
df128 = pd.DataFrame(columns = ['Model', 'Train Accuracy', 'Validation Accuracy', 'Train Loss', 'Validation Loss', 'History'])
model_128 = models.Sequential([
layers.Conv2D(32, (3, 3), activation='relu', input_shape=(128, 128, 1)),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.Flatten(),
layers.Dense(64, activation='relu'),
layers.Dense(15, activation='softmax')
])
# Compile the model
model_128.compile(optimizer='adam',
loss= 'categorical_crossentropy',
metrics=['accuracy'])
# Train the model
history = model_128.fit(X_train128, train_labels, validation_data=(X_val128, validation_labels),
epochs=10, batch_size=32, verbose=2, class_weight = weights)
scores = model_128.evaluate(X_val128, validation_labels, verbose=0)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
df128.loc[len(df128)] = ['Base 128 Model', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
Epoch 1/10 283/283 - 5s - loss: 1.9846 - accuracy: 0.3630 - val_loss: 1.7195 - val_accuracy: 0.4307 - 5s/epoch - 16ms/step Epoch 2/10 283/283 - 2s - loss: 1.1167 - accuracy: 0.6564 - val_loss: 1.0886 - val_accuracy: 0.6613 - 2s/epoch - 7ms/step Epoch 3/10 283/283 - 2s - loss: 0.6139 - accuracy: 0.8137 - val_loss: 0.7902 - val_accuracy: 0.7620 - 2s/epoch - 7ms/step Epoch 4/10 283/283 - 2s - loss: 0.3018 - accuracy: 0.9094 - val_loss: 0.7090 - val_accuracy: 0.7850 - 2s/epoch - 7ms/step Epoch 5/10 283/283 - 2s - loss: 0.1490 - accuracy: 0.9555 - val_loss: 0.9785 - val_accuracy: 0.7557 - 2s/epoch - 7ms/step Epoch 6/10 283/283 - 2s - loss: 0.1152 - accuracy: 0.9657 - val_loss: 0.8711 - val_accuracy: 0.8013 - 2s/epoch - 7ms/step Epoch 7/10 283/283 - 2s - loss: 0.0481 - accuracy: 0.9869 - val_loss: 1.0326 - val_accuracy: 0.7907 - 2s/epoch - 7ms/step Epoch 8/10 283/283 - 2s - loss: 0.0721 - accuracy: 0.9802 - val_loss: 0.9543 - val_accuracy: 0.8033 - 2s/epoch - 7ms/step Epoch 9/10 283/283 - 2s - loss: 0.0277 - accuracy: 0.9917 - val_loss: 1.1317 - val_accuracy: 0.7967 - 2s/epoch - 7ms/step Epoch 10/10 283/283 - 2s - loss: 0.0453 - accuracy: 0.9855 - val_loss: 1.2959 - val_accuracy: 0.7693 - 2s/epoch - 7ms/step Baseline Error: 23.07%
model_128.save('Base128Model.h5')
As we can see, the base model achieves a decent accuracy of around 75% for both input resolutions after 10 epochs. This level of accuracy provides valuable insight into the nature of the dataset and indicates that it is suitable for modeling.
Decent Performance: An accuracy of 75% means the base model correctly predicts the target class for roughly three out of four instances. This is solid performance, but there is clearly room for improvement.
Model Feasibility: The achieved accuracy confirms that the dataset contains discernible patterns that the model is capturing, which supports the feasibility of modeling it.
Baseline for Comparison: The 75% accuracy serves as a baseline for evaluating more complex models. Any subsequent model should aim to surpass it to demonstrate meaningful improvement.
Training Progress: The training loss decreases over the 10 epochs, indicating effective learning from the training data. We should, however, note the rise in validation loss after the 5th epoch; since this is only the base model there is no need to act on it here, but when training the actual model architectures we can use a callback to guard against overfitting.
Validation Loss Interpretation: The validation loss also decreases overall, suggesting that the model generalizes reasonably well to unseen data.
Training Progress: Similar to the other graph, the loss decreases over the 10 epochs, showing effective learning from the training data.
Validation Loss Interpretation: The validation loss decreases, indicating good generalization to unseen data.
Accuracy Comparison: Both the 128x128 and 31x31 models reach around 75% accuracy, suggesting that the model performs consistently across the two resolutions.
Loss Comparison: The loss curves for both resolutions decrease consistently, indicating effective learning. The lower-resolution (31x31) curves fluctuate slightly more, but the overall trend is still downward.
In summary, the base model demonstrates promising performance; however, some overfitting is visible, and further optimization and exploration can likely yield better results.
To address this, several standard techniques are worth trying (a sketch of how a couple of them could be attached as callbacks follows this list):

- Data Augmentation: enlarge the effective training set by applying random transformations to the images.
- Batch Normalization: normalize layer activations to stabilize and speed up training.
- Learning Rate Scheduling: lower the learning rate as training progresses so the model can settle into a better minimum.
- Regularization Techniques: penalize large weights (e.g. L2) or randomly drop units (dropout) to curb overfitting.
- Early Stopping: stop training once the validation loss stops improving.
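The sketch below assumes the callbacks imported at the top of the notebook; the patience and factor values are illustrative rather than tuned, and this is not code that produced any of the results shown here.

```python
# Sketch only: early stopping plus learning-rate reduction on a validation-loss plateau.
example_callbacks = [
    EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True),
    ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=3, min_lr=1e-5),
]
# These would be passed to training, e.g.
# model.fit(X_train31, train_labels,
#           validation_data=(X_val31, validation_labels),
#           epochs=100, batch_size=32, callbacks=example_callbacks)
```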
In our quest to enhance the performance of our neural network, we'll explore two well-established architectures designed by experts: AlexNet and LeNet.
The input layer will be sized to match the images we are using (31x31 and 128x128).
def AlexNet(input_shape, num_classes=15):
    model = models.Sequential()
    # Convolutional Layer 1
    model.add(layers.Conv2D(96, (11, 11), strides=(4, 4), activation='relu', input_shape=(input_shape, input_shape, 1)))
    model.add(layers.MaxPooling2D((2, 2), strides=(2, 2)))
    # Convolutional Layer 2
    model.add(layers.Conv2D(256, (5, 5), padding='same', activation='relu'))
    # Adjust the pooling parameters or remove pooling if necessary
    # model.add(layers.MaxPooling2D((2, 2), strides=(2, 2)))
    # Convolutional Layer 3
    model.add(layers.Conv2D(384, (3, 3), padding='same', activation='relu'))
    # Convolutional Layer 4
    model.add(layers.Conv2D(384, (3, 3), padding='same', activation='relu'))
    # Convolutional Layer 5
    model.add(layers.Conv2D(256, (3, 3), padding='same', activation='relu'))
    model.add(layers.MaxPooling2D((3, 3), strides=(2, 2)))
    # Flatten the output before the fully connected layers
    model.add(layers.Flatten())
    # Fully Connected Layer 1
    model.add(layers.Dense(4096, activation='relu'))
    # Fully Connected Layer 2
    model.add(layers.Dense(4096, activation='relu'))
    # Output Layer
    model.add(layers.Dense(num_classes, activation='softmax'))
    return model
def Lenet(input_shape, num_classes=15):
    model = models.Sequential()
    # Convolutional Layer 1
    model.add(layers.Conv2D(6, (5, 5), activation='tanh', input_shape=(input_shape, input_shape, 1)))
    model.add(layers.MaxPooling2D((2, 2), strides=(2, 2)))
    # Convolutional Layer 2
    model.add(layers.Conv2D(16, (5, 5), activation='tanh'))
    model.add(layers.MaxPooling2D((2, 2), strides=(2, 2)))
    # Flatten the output before the fully connected layers
    model.add(layers.Flatten())
    # Fully Connected Layer 1
    model.add(layers.Dense(120, activation='tanh'))
    # Fully Connected Layer 2
    model.add(layers.Dense(84, activation='tanh'))
    # Output Layer
    model.add(layers.Dense(num_classes, activation='softmax'))
    return model
# 31x31 LeNet
model = Lenet(31)
model.compile(optimizer='adam',
loss= 'categorical_crossentropy',
metrics=['accuracy'])
history = model.fit(X_train31, train_labels, validation_data=(X_val31, validation_labels),
epochs=10, batch_size=32, verbose=2, class_weight = weights)
scores = model.evaluate(X_val31, validation_labels, verbose=0)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
df31.loc[len(df31)] = ['LeNet', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
Epoch 1/10 283/283 - 4s - loss: 2.3475 - accuracy: 0.2483 - val_loss: 2.2139 - val_accuracy: 0.3003 - 4s/epoch - 13ms/step Epoch 2/10 283/283 - 1s - loss: 1.9472 - accuracy: 0.3871 - val_loss: 1.9224 - val_accuracy: 0.3863 - 1s/epoch - 4ms/step Epoch 3/10 283/283 - 1s - loss: 1.6955 - accuracy: 0.4649 - val_loss: 1.8224 - val_accuracy: 0.4173 - 1s/epoch - 4ms/step Epoch 4/10 283/283 - 1s - loss: 1.5319 - accuracy: 0.5200 - val_loss: 1.6240 - val_accuracy: 0.4850 - 1s/epoch - 4ms/step Epoch 5/10 283/283 - 1s - loss: 1.3913 - accuracy: 0.5651 - val_loss: 1.5202 - val_accuracy: 0.5277 - 1s/epoch - 4ms/step Epoch 6/10 283/283 - 1s - loss: 1.2777 - accuracy: 0.6025 - val_loss: 1.4312 - val_accuracy: 0.5390 - 1s/epoch - 4ms/step Epoch 7/10 283/283 - 1s - loss: 1.1565 - accuracy: 0.6382 - val_loss: 1.3474 - val_accuracy: 0.5633 - 1s/epoch - 4ms/step Epoch 8/10 283/283 - 1s - loss: 1.0620 - accuracy: 0.6674 - val_loss: 1.3021 - val_accuracy: 0.5867 - 1s/epoch - 4ms/step Epoch 9/10 283/283 - 1s - loss: 0.9834 - accuracy: 0.6953 - val_loss: 1.2106 - val_accuracy: 0.6163 - 1s/epoch - 4ms/step Epoch 10/10 283/283 - 1s - loss: 0.8788 - accuracy: 0.7286 - val_loss: 1.3322 - val_accuracy: 0.5760 - 1s/epoch - 4ms/step Baseline Error: 42.40%
# 31x31 alexnet
model = AlexNet(31)
model.compile(optimizer='adam',
loss= 'categorical_crossentropy',
metrics=['accuracy'])
history = model.fit(X_train31, train_labels, validation_data=(X_val31, validation_labels),
epochs=10, batch_size=32, verbose=2, class_weight = weights)
scores = model.evaluate(X_val31, validation_labels, verbose=0)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
df31.loc[len(df31)] = ['AlexNet', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
Epoch 1/10 283/283 - 6s - loss: 2.6459 - accuracy: 0.0984 - val_loss: 2.7833 - val_accuracy: 0.0667 - 6s/epoch - 20ms/step Epoch 2/10 283/283 - 2s - loss: 2.6401 - accuracy: 0.1031 - val_loss: 2.7889 - val_accuracy: 0.0667 - 2s/epoch - 6ms/step Epoch 3/10 283/283 - 2s - loss: 2.6384 - accuracy: 0.1037 - val_loss: 2.7829 - val_accuracy: 0.0667 - 2s/epoch - 6ms/step Epoch 4/10 283/283 - 2s - loss: 2.6379 - accuracy: 0.1038 - val_loss: 2.7774 - val_accuracy: 0.0667 - 2s/epoch - 6ms/step Epoch 5/10 283/283 - 2s - loss: 2.6381 - accuracy: 0.1042 - val_loss: 2.7913 - val_accuracy: 0.0667 - 2s/epoch - 6ms/step Epoch 6/10 283/283 - 2s - loss: 2.6378 - accuracy: 0.1053 - val_loss: 2.7847 - val_accuracy: 0.0667 - 2s/epoch - 6ms/step Epoch 7/10 283/283 - 2s - loss: 2.6373 - accuracy: 0.1025 - val_loss: 2.7908 - val_accuracy: 0.0667 - 2s/epoch - 6ms/step Epoch 8/10 283/283 - 2s - loss: 2.6372 - accuracy: 0.1033 - val_loss: 2.8041 - val_accuracy: 0.0667 - 2s/epoch - 6ms/step Epoch 9/10 283/283 - 2s - loss: 2.6376 - accuracy: 0.1041 - val_loss: 2.7971 - val_accuracy: 0.0667 - 2s/epoch - 6ms/step Epoch 10/10 283/283 - 2s - loss: 2.6371 - accuracy: 0.1058 - val_loss: 2.7860 - val_accuracy: 0.0667 - 2s/epoch - 6ms/step Baseline Error: 93.33%
df31.sort_values(by=['Validation Accuracy'],ascending=False)
|   | Model Name | Train Accuracy | Validation Accuracy | Train Loss | Validation Loss | History |
|---|---|---|---|---|---|---|
| 1 | LeNet | 0.728622 | 0.576000 | 0.878840 | 1.332178 | <keras.callbacks.History object at 0x7f5a0c5dd... |
| 0 | Base 31 Model | 0.780128 | 0.537667 | 0.679253 | 1.505984 | <keras.callbacks.History object at 0x7f5a1785d... |
| 2 | AlexNet | 0.105782 | 0.066667 | 2.637130 | 2.785964 | <keras.callbacks.History object at 0x7f5a0c3bb... |
| Model Name | Train Accuracy | Validation Accuracy | Train Loss | Validation Loss | Evaluation |
|---|---|---|---|---|---|
| Base 31 Model | 78.11% | 68.77% | 0.6784 | 1.0055 | Reasonable performance, slight overfitting. |
| LeNet | 73.01% | 57.37% | 0.8786 | 1.3403 | Signs of overfitting, might perform better with regularization. |
| AlexNet | 10.58% | 6.67% | 2.6364 | 2.7977 | Poor performance. |
model = Lenet(128)
model.compile(optimizer='adam',
loss= 'categorical_crossentropy',
metrics=['accuracy'])
history = model.fit(X_train128, train_labels, validation_data=(X_val128, validation_labels),
epochs=10, batch_size=32, verbose=2, class_weight = weights)
scores = model.evaluate(X_val128, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
df128.loc[len(df128)] = ['LeNet', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
Epoch 1/10 283/283 - 4s - loss: 1.7415 - accuracy: 0.4465 - val_loss: 1.4099 - val_accuracy: 0.5540 - 4s/epoch - 13ms/step Epoch 2/10 283/283 - 1s - loss: 0.9844 - accuracy: 0.6986 - val_loss: 1.0954 - val_accuracy: 0.6657 - 1s/epoch - 5ms/step Epoch 3/10 283/283 - 1s - loss: 0.5125 - accuracy: 0.8605 - val_loss: 1.0119 - val_accuracy: 0.6897 - 1s/epoch - 5ms/step Epoch 4/10 283/283 - 1s - loss: 0.2301 - accuracy: 0.9485 - val_loss: 1.1692 - val_accuracy: 0.6587 - 1s/epoch - 5ms/step Epoch 5/10 283/283 - 1s - loss: 0.0941 - accuracy: 0.9842 - val_loss: 1.0573 - val_accuracy: 0.7023 - 1s/epoch - 5ms/step Epoch 6/10 283/283 - 1s - loss: 0.0371 - accuracy: 0.9963 - val_loss: 1.1030 - val_accuracy: 0.7083 - 1s/epoch - 5ms/step Epoch 7/10 283/283 - 1s - loss: 0.0154 - accuracy: 0.9993 - val_loss: 1.1506 - val_accuracy: 0.7007 - 1s/epoch - 5ms/step Epoch 8/10 283/283 - 1s - loss: 0.0083 - accuracy: 1.0000 - val_loss: 1.1598 - val_accuracy: 0.7057 - 1s/epoch - 5ms/step Epoch 9/10 283/283 - 1s - loss: 0.0052 - accuracy: 1.0000 - val_loss: 1.1819 - val_accuracy: 0.7057 - 1s/epoch - 5ms/step Epoch 10/10 283/283 - 1s - loss: 0.0036 - accuracy: 1.0000 - val_loss: 1.2037 - val_accuracy: 0.7040 - 1s/epoch - 5ms/step 94/94 - 0s - loss: 1.2037 - accuracy: 0.7040 - 265ms/epoch - 3ms/step Baseline Error: 29.60%
model = AlexNet(128)
model.compile(optimizer='adam',
loss= 'categorical_crossentropy',
metrics=['accuracy'])
history = model.fit(X_train128, train_labels, validation_data=(X_val128, validation_labels),
epochs=10, batch_size=32, verbose=2, class_weight = weights)
scores = model.evaluate(X_val128, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
df128.loc[len(df128)] = ['AlexNet', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
Epoch 1/10 283/283 - 8s - loss: 2.6561 - accuracy: 0.0963 - val_loss: 2.7892 - val_accuracy: 0.0667 - 8s/epoch - 28ms/step Epoch 2/10 283/283 - 4s - loss: 2.6407 - accuracy: 0.1023 - val_loss: 2.7942 - val_accuracy: 0.0667 - 4s/epoch - 14ms/step Epoch 3/10 283/283 - 4s - loss: 2.6387 - accuracy: 0.1002 - val_loss: 2.7831 - val_accuracy: 0.0667 - 4s/epoch - 14ms/step Epoch 4/10 283/283 - 4s - loss: 2.6380 - accuracy: 0.1041 - val_loss: 2.7782 - val_accuracy: 0.0667 - 4s/epoch - 14ms/step Epoch 5/10 283/283 - 4s - loss: 2.6379 - accuracy: 0.1042 - val_loss: 2.7914 - val_accuracy: 0.0667 - 4s/epoch - 14ms/step Epoch 6/10 283/283 - 4s - loss: 2.6378 - accuracy: 0.1054 - val_loss: 2.7871 - val_accuracy: 0.0667 - 4s/epoch - 14ms/step Epoch 7/10 283/283 - 4s - loss: 2.6373 - accuracy: 0.1025 - val_loss: 2.7928 - val_accuracy: 0.0667 - 4s/epoch - 14ms/step Epoch 8/10 283/283 - 4s - loss: 2.6372 - accuracy: 0.1038 - val_loss: 2.8032 - val_accuracy: 0.0667 - 4s/epoch - 14ms/step Epoch 9/10 283/283 - 4s - loss: 2.6376 - accuracy: 0.1040 - val_loss: 2.7959 - val_accuracy: 0.0667 - 4s/epoch - 14ms/step Epoch 10/10 283/283 - 4s - loss: 2.6371 - accuracy: 0.1058 - val_loss: 2.7880 - val_accuracy: 0.0667 - 4s/epoch - 14ms/step 94/94 - 0s - loss: 2.7880 - accuracy: 0.0667 - 451ms/epoch - 5ms/step Baseline Error: 93.33%
df128
|   | Model | Train Accuracy | Validation Accuracy | Train Loss | Validation Loss | History |
|---|---|---|---|---|---|---|
| 0 | Base 128 Model | 0.985490 | 0.769333 | 0.045317 | 1.295857 | <keras.callbacks.History object at 0x7f5a16d74... |
| 1 | LeNet | 1.000000 | 0.704000 | 0.003636 | 1.203718 | <keras.callbacks.History object at 0x7f59ec532... |
| 2 | AlexNet | 0.105782 | 0.066667 | 2.637087 | 2.787983 | <keras.callbacks.History object at 0x7f59ec79e... |
| Model | Train Accuracy | Validation Accuracy | Train Loss | Validation Loss | Evaluation |
|---|---|---|---|---|---|
| Base 128 Model | 98.28% | 76.63% | 0.05399 | 1.32059 | High accuracy, potential overfitting, check for regularization. |
| LeNet | 98.90% | 69.73% | 0.10311 | 1.08369 | High accuracy, potential overfitting regularization might be good. |
| AlexNet | 10.58% | 6.67% | 2.63660 | 2.79447 | Poor performance |
Recalling the methods for improving our models listed earlier, we can now try to implement some of them:
- Data Augmentation
- Batch Normalization
- Learning Rate Scheduling
- Regularization Techniques
- Early Stopping
- Adjusting Weights
We can calculate a weight for each class by dividing the total number of images by the product of the number of classes and the number of images in that class. We can then use these weights to scale the loss during training.
```python
# Calculate a class weight for each label: total_images / (num_classes * images_in_class)
train_class['weights'] = (1 / train_class['count']) * (sum(train_class['count']) / len(train_data.class_names))
class_weight = train_class['weights'].to_dict()
```
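One caveat we would flag here (our own observation, not something stated above): `Series.to_dict()` keys the weights by the DataFrame index, which reflects the order in which classes were first encountered, while Keras expects the `class_weight` dictionary to be keyed by the integer label index. A hedged sketch of building the same weights keyed explicitly by label index (the variable name is ours):

```python
# Sketch only: key each weight by the label index that image_dataset_from_directory
# assigns (the class's position in class_names), so entry i corresponds to label i.
total = train_class['count'].sum()
num_classes = len(train_data.class_names)
class_weight_aligned = {
    idx: total / (num_classes * class_counts[name])
    for idx, name in enumerate(train_data.class_names)
}
```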
As we are not actually using the LeNet and AlexNet models, we will not be applying the regularization techniques to them. However, we will apply them to the base model to try to improve its performance. As a bonus, we will also use the class weights computed above to accommodate the imbalanced dataset.
# model_31 = models.Sequential([
# layers.Conv2D(32, (3, 3), activation='relu', input_shape=(31, 31, 1)),
# layers.MaxPooling2D((2, 2)),
# layers.Conv2D(64, (3, 3), activation='relu'),
# layers.MaxPooling2D((2, 2)),
# layers.Conv2D(64, (3, 3), activation='relu'),
# layers.Flatten(),
# layers.Dense(64, activation='relu'),
# layers.Dense(15, activation='softmax')
# ])
# This is the original "base model"; to overcome the overfitting we can apply regularization,
# and to improve the accuracy we should also increase the number of epochs.
model_31new = models.Sequential([
layers.Conv2D(32, (3, 3), activation='relu', input_shape=(31, 31, 1)),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.Flatten(),
layers.Dense(64, activation='relu', kernel_regularizer=regularizers.l2(0.001)),
layers.Dense(15, activation='softmax')
])
model_31new.compile(optimizer='adam',
loss= 'categorical_crossentropy',
metrics=['accuracy'])
history = model_31new.fit(X_train31, train_labels, validation_data=(X_val31, validation_labels),
epochs=100, batch_size=32, verbose=2, class_weight = class_weight)
scores = model_31new.evaluate(X_val31, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
df31.loc[len(df31)] = ['RegularizedModel', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
Epoch 1/100 283/283 - 3s - loss: 2.9040 - accuracy: 0.1603 - val_loss: 2.6126 - val_accuracy: 0.1637 - 3s/epoch - 11ms/step Epoch 2/100 283/283 - 1s - loss: 2.3496 - accuracy: 0.3056 - val_loss: 2.0725 - val_accuracy: 0.3460 - 1s/epoch - 4ms/step Epoch 3/100 283/283 - 1s - loss: 1.9670 - accuracy: 0.4129 - val_loss: 1.8954 - val_accuracy: 0.3897 - 1s/epoch - 4ms/step Epoch 4/100 283/283 - 1s - loss: 1.6449 - accuracy: 0.5177 - val_loss: 1.4937 - val_accuracy: 0.5353 - 1s/epoch - 4ms/step Epoch 5/100 283/283 - 1s - loss: 1.4355 - accuracy: 0.5853 - val_loss: 1.6425 - val_accuracy: 0.5177 - 1s/epoch - 4ms/step Epoch 6/100 283/283 - 1s - loss: 1.2782 - accuracy: 0.6371 - val_loss: 1.2124 - val_accuracy: 0.6377 - 1s/epoch - 4ms/step Epoch 7/100 283/283 - 1s - loss: 1.1504 - accuracy: 0.6698 - val_loss: 1.1914 - val_accuracy: 0.6370 - 1s/epoch - 4ms/step Epoch 8/100 283/283 - 1s - loss: 1.0742 - accuracy: 0.6893 - val_loss: 1.4247 - val_accuracy: 0.5690 - 1s/epoch - 4ms/step Epoch 9/100 283/283 - 1s - loss: 1.0145 - accuracy: 0.7117 - val_loss: 1.1554 - val_accuracy: 0.6557 - 1s/epoch - 4ms/step Epoch 10/100 283/283 - 1s - loss: 0.8961 - accuracy: 0.7467 - val_loss: 1.0680 - val_accuracy: 0.6840 - 1s/epoch - 4ms/step Epoch 11/100 283/283 - 1s - loss: 0.8263 - accuracy: 0.7687 - val_loss: 1.0278 - val_accuracy: 0.7077 - 1s/epoch - 4ms/step Epoch 12/100 283/283 - 1s - loss: 0.7837 - accuracy: 0.7794 - val_loss: 1.0226 - val_accuracy: 0.7207 - 1s/epoch - 4ms/step Epoch 13/100 283/283 - 1s - loss: 0.7141 - accuracy: 0.8056 - val_loss: 1.0005 - val_accuracy: 0.7190 - 1s/epoch - 4ms/step Epoch 14/100 283/283 - 1s - loss: 0.6749 - accuracy: 0.8136 - val_loss: 0.9839 - val_accuracy: 0.7360 - 1s/epoch - 4ms/step Epoch 15/100 283/283 - 1s - loss: 0.6323 - accuracy: 0.8241 - val_loss: 0.9788 - val_accuracy: 0.7250 - 1s/epoch - 4ms/step Epoch 16/100 283/283 - 1s - loss: 0.5862 - accuracy: 0.8419 - val_loss: 1.2402 - val_accuracy: 0.6657 - 1s/epoch - 4ms/step Epoch 17/100 283/283 - 1s - loss: 0.5957 - accuracy: 0.8403 - val_loss: 0.8912 - val_accuracy: 0.7617 - 1s/epoch - 4ms/step Epoch 18/100 283/283 - 1s - loss: 0.5359 - accuracy: 0.8604 - val_loss: 0.8845 - val_accuracy: 0.7753 - 1s/epoch - 4ms/step Epoch 19/100 283/283 - 1s - loss: 0.4801 - accuracy: 0.8765 - val_loss: 1.0016 - val_accuracy: 0.7450 - 1s/epoch - 4ms/step Epoch 20/100 283/283 - 1s - loss: 0.4763 - accuracy: 0.8759 - val_loss: 0.9191 - val_accuracy: 0.7763 - 1s/epoch - 5ms/step Epoch 21/100 283/283 - 1s - loss: 0.4283 - accuracy: 0.8929 - val_loss: 0.9007 - val_accuracy: 0.7840 - 1s/epoch - 4ms/step Epoch 22/100 283/283 - 1s - loss: 0.4200 - accuracy: 0.8965 - val_loss: 1.0420 - val_accuracy: 0.7533 - 1s/epoch - 4ms/step Epoch 23/100 283/283 - 1s - loss: 0.4031 - accuracy: 0.8999 - val_loss: 1.0916 - val_accuracy: 0.7257 - 1s/epoch - 4ms/step Epoch 24/100 283/283 - 1s - loss: 0.3992 - accuracy: 0.9010 - val_loss: 0.8774 - val_accuracy: 0.7943 - 1s/epoch - 4ms/step Epoch 25/100 283/283 - 1s - loss: 0.3751 - accuracy: 0.9082 - val_loss: 0.9148 - val_accuracy: 0.7803 - 1s/epoch - 4ms/step Epoch 26/100 283/283 - 1s - loss: 0.3647 - accuracy: 0.9170 - val_loss: 0.9116 - val_accuracy: 0.7920 - 1s/epoch - 4ms/step Epoch 27/100 283/283 - 1s - loss: 0.3553 - accuracy: 0.9170 - val_loss: 0.9156 - val_accuracy: 0.7817 - 1s/epoch - 4ms/step Epoch 28/100 283/283 - 1s - loss: 0.3553 - accuracy: 0.9222 - val_loss: 0.8456 - val_accuracy: 0.8017 - 1s/epoch - 4ms/step Epoch 29/100 283/283 - 1s - loss: 0.2790 - accuracy: 0.9436 - val_loss: 0.9477 - 
val_accuracy: 0.7830 - 1s/epoch - 4ms/step Epoch 30/100 283/283 - 1s - loss: 0.2785 - accuracy: 0.9417 - val_loss: 0.9205 - val_accuracy: 0.8020 - 1s/epoch - 4ms/step Epoch 31/100 283/283 - 1s - loss: 0.2923 - accuracy: 0.9399 - val_loss: 0.9582 - val_accuracy: 0.7960 - 1s/epoch - 4ms/step Epoch 32/100 283/283 - 1s - loss: 0.3059 - accuracy: 0.9363 - val_loss: 1.1586 - val_accuracy: 0.7507 - 1s/epoch - 4ms/step Epoch 33/100 283/283 - 1s - loss: 0.2803 - accuracy: 0.9409 - val_loss: 1.0094 - val_accuracy: 0.7833 - 1s/epoch - 4ms/step Epoch 34/100 283/283 - 1s - loss: 0.2728 - accuracy: 0.9428 - val_loss: 1.0678 - val_accuracy: 0.7720 - 1s/epoch - 4ms/step Epoch 35/100 283/283 - 1s - loss: 0.2742 - accuracy: 0.9454 - val_loss: 0.9388 - val_accuracy: 0.8123 - 1s/epoch - 4ms/step Epoch 36/100 283/283 - 1s - loss: 0.2674 - accuracy: 0.9476 - val_loss: 0.9678 - val_accuracy: 0.8003 - 1s/epoch - 4ms/step Epoch 37/100 283/283 - 1s - loss: 0.2332 - accuracy: 0.9599 - val_loss: 1.0466 - val_accuracy: 0.7847 - 1s/epoch - 4ms/step Epoch 38/100 283/283 - 1s - loss: 0.2561 - accuracy: 0.9497 - val_loss: 1.0662 - val_accuracy: 0.7863 - 1s/epoch - 4ms/step Epoch 39/100 283/283 - 1s - loss: 0.2265 - accuracy: 0.9605 - val_loss: 1.0283 - val_accuracy: 0.8040 - 1s/epoch - 4ms/step Epoch 40/100 283/283 - 1s - loss: 0.2234 - accuracy: 0.9610 - val_loss: 0.9668 - val_accuracy: 0.8257 - 1s/epoch - 4ms/step Epoch 41/100 283/283 - 1s - loss: 0.2363 - accuracy: 0.9565 - val_loss: 1.3403 - val_accuracy: 0.7463 - 1s/epoch - 4ms/step Epoch 42/100 283/283 - 1s - loss: 0.2284 - accuracy: 0.9593 - val_loss: 0.9845 - val_accuracy: 0.8140 - 1s/epoch - 4ms/step Epoch 43/100 283/283 - 1s - loss: 0.2112 - accuracy: 0.9661 - val_loss: 1.0936 - val_accuracy: 0.8020 - 1s/epoch - 4ms/step Epoch 44/100 283/283 - 1s - loss: 0.1736 - accuracy: 0.9782 - val_loss: 1.0207 - val_accuracy: 0.8233 - 1s/epoch - 4ms/step Epoch 45/100 283/283 - 1s - loss: 0.2062 - accuracy: 0.9668 - val_loss: 1.1472 - val_accuracy: 0.7943 - 1s/epoch - 4ms/step Epoch 46/100 283/283 - 1s - loss: 0.3048 - accuracy: 0.9364 - val_loss: 1.3788 - val_accuracy: 0.7447 - 1s/epoch - 4ms/step Epoch 47/100 283/283 - 1s - loss: 0.2304 - accuracy: 0.9597 - val_loss: 1.1196 - val_accuracy: 0.7870 - 1s/epoch - 4ms/step Epoch 48/100 283/283 - 1s - loss: 0.1547 - accuracy: 0.9863 - val_loss: 0.9935 - val_accuracy: 0.8313 - 1s/epoch - 4ms/step Epoch 49/100 283/283 - 1s - loss: 0.1456 - accuracy: 0.9888 - val_loss: 0.9410 - val_accuracy: 0.8340 - 1s/epoch - 4ms/step Epoch 50/100 283/283 - 1s - loss: 0.2674 - accuracy: 0.9458 - val_loss: 1.0468 - val_accuracy: 0.8030 - 1s/epoch - 4ms/step Epoch 51/100 283/283 - 1s - loss: 0.2777 - accuracy: 0.9474 - val_loss: 0.9536 - val_accuracy: 0.8293 - 1s/epoch - 4ms/step Epoch 52/100 283/283 - 1s - loss: 0.1534 - accuracy: 0.9866 - val_loss: 0.9191 - val_accuracy: 0.8373 - 1s/epoch - 4ms/step Epoch 53/100 283/283 - 1s - loss: 0.1385 - accuracy: 0.9908 - val_loss: 0.9717 - val_accuracy: 0.8390 - 1s/epoch - 4ms/step Epoch 54/100 283/283 - 1s - loss: 0.2053 - accuracy: 0.9700 - val_loss: 1.3681 - val_accuracy: 0.7437 - 1s/epoch - 4ms/step Epoch 55/100 283/283 - 1s - loss: 0.2513 - accuracy: 0.9529 - val_loss: 1.0173 - val_accuracy: 0.8163 - 1s/epoch - 4ms/step Epoch 56/100 283/283 - 1s - loss: 0.1437 - accuracy: 0.9874 - val_loss: 1.0223 - val_accuracy: 0.8287 - 1s/epoch - 4ms/step Epoch 57/100 283/283 - 1s - loss: 0.1913 - accuracy: 0.9708 - val_loss: 1.1192 - val_accuracy: 0.8100 - 1s/epoch - 4ms/step Epoch 58/100 283/283 - 1s - loss: 
0.2272 - accuracy: 0.9584 - val_loss: 1.0534 - val_accuracy: 0.8147 - 1s/epoch - 4ms/step Epoch 59/100 283/283 - 1s - loss: 0.2448 - accuracy: 0.9540 - val_loss: 1.0917 - val_accuracy: 0.8117 - 1s/epoch - 4ms/step Epoch 60/100 283/283 - 1s - loss: 0.1365 - accuracy: 0.9908 - val_loss: 1.0214 - val_accuracy: 0.8377 - 1s/epoch - 4ms/step Epoch 61/100 283/283 - 1s - loss: 0.1108 - accuracy: 0.9990 - val_loss: 0.9741 - val_accuracy: 0.8513 - 1s/epoch - 4ms/step Epoch 62/100 283/283 - 1s - loss: 0.1058 - accuracy: 0.9982 - val_loss: 0.9876 - val_accuracy: 0.8457 - 1s/epoch - 4ms/step Epoch 63/100 283/283 - 1s - loss: 0.2986 - accuracy: 0.9371 - val_loss: 1.1669 - val_accuracy: 0.7910 - 1s/epoch - 4ms/step Epoch 64/100 283/283 - 1s - loss: 0.1726 - accuracy: 0.9764 - val_loss: 1.0821 - val_accuracy: 0.8123 - 1s/epoch - 4ms/step Epoch 65/100 283/283 - 1s - loss: 0.2194 - accuracy: 0.9616 - val_loss: 0.9538 - val_accuracy: 0.8303 - 1s/epoch - 4ms/step Epoch 66/100 283/283 - 1s - loss: 0.1297 - accuracy: 0.9897 - val_loss: 1.0374 - val_accuracy: 0.8263 - 1s/epoch - 4ms/step Epoch 67/100 283/283 - 1s - loss: 0.1989 - accuracy: 0.9668 - val_loss: 1.2006 - val_accuracy: 0.7837 - 1s/epoch - 4ms/step Epoch 68/100 283/283 - 1s - loss: 0.1846 - accuracy: 0.9714 - val_loss: 1.1019 - val_accuracy: 0.8040 - 1s/epoch - 4ms/step Epoch 69/100 283/283 - 1s - loss: 0.1686 - accuracy: 0.9768 - val_loss: 1.0287 - val_accuracy: 0.8280 - 1s/epoch - 4ms/step Epoch 70/100 283/283 - 1s - loss: 0.1376 - accuracy: 0.9876 - val_loss: 1.4380 - val_accuracy: 0.7593 - 1s/epoch - 4ms/step Epoch 71/100 283/283 - 1s - loss: 0.3305 - accuracy: 0.9399 - val_loss: 1.0856 - val_accuracy: 0.8107 - 1s/epoch - 4ms/step Epoch 72/100 283/283 - 1s - loss: 0.1367 - accuracy: 0.9879 - val_loss: 1.0187 - val_accuracy: 0.8247 - 1s/epoch - 4ms/step Epoch 73/100 283/283 - 1s - loss: 0.1077 - accuracy: 0.9977 - val_loss: 0.9249 - val_accuracy: 0.8497 - 1s/epoch - 4ms/step Epoch 74/100 283/283 - 1s - loss: 0.0960 - accuracy: 0.9997 - val_loss: 0.9571 - val_accuracy: 0.8480 - 1s/epoch - 4ms/step Epoch 75/100 283/283 - 1s - loss: 0.0896 - accuracy: 0.9999 - val_loss: 0.9460 - val_accuracy: 0.8503 - 1s/epoch - 4ms/step Epoch 76/100 283/283 - 1s - loss: 0.0839 - accuracy: 1.0000 - val_loss: 0.9172 - val_accuracy: 0.8530 - 1s/epoch - 4ms/step Epoch 77/100 283/283 - 1s - loss: 0.1749 - accuracy: 0.9756 - val_loss: 1.4209 - val_accuracy: 0.7100 - 1s/epoch - 4ms/step Epoch 78/100 283/283 - 1s - loss: 0.3389 - accuracy: 0.9157 - val_loss: 0.9443 - val_accuracy: 0.8233 - 1s/epoch - 4ms/step Epoch 79/100 283/283 - 1s - loss: 0.1689 - accuracy: 0.9719 - val_loss: 1.0381 - val_accuracy: 0.8113 - 1s/epoch - 4ms/step Epoch 80/100 283/283 - 1s - loss: 0.1420 - accuracy: 0.9838 - val_loss: 1.0395 - val_accuracy: 0.8057 - 1s/epoch - 4ms/step Epoch 81/100 283/283 - 1s - loss: 0.1464 - accuracy: 0.9807 - val_loss: 0.9728 - val_accuracy: 0.8333 - 1s/epoch - 4ms/step Epoch 82/100 283/283 - 1s - loss: 0.2286 - accuracy: 0.9566 - val_loss: 1.0039 - val_accuracy: 0.8113 - 1s/epoch - 4ms/step Epoch 83/100 283/283 - 1s - loss: 0.1534 - accuracy: 0.9793 - val_loss: 0.9491 - val_accuracy: 0.8370 - 1s/epoch - 4ms/step Epoch 84/100 283/283 - 1s - loss: 0.0978 - accuracy: 0.9986 - val_loss: 0.9322 - val_accuracy: 0.8480 - 1s/epoch - 4ms/step Epoch 85/100 283/283 - 1s - loss: 0.0874 - accuracy: 0.9999 - val_loss: 0.9261 - val_accuracy: 0.8523 - 1s/epoch - 4ms/step Epoch 86/100 283/283 - 1s - loss: 0.0815 - accuracy: 0.9999 - val_loss: 0.9294 - val_accuracy: 0.8523 - 1s/epoch - 
4ms/step Epoch 87/100 283/283 - 1s - loss: 0.0762 - accuracy: 1.0000 - val_loss: 0.9334 - val_accuracy: 0.8480 - 1s/epoch - 4ms/step Epoch 88/100 283/283 - 1s - loss: 0.0710 - accuracy: 1.0000 - val_loss: 0.9225 - val_accuracy: 0.8507 - 1s/epoch - 4ms/step Epoch 89/100 283/283 - 1s - loss: 0.0671 - accuracy: 0.9996 - val_loss: 0.9600 - val_accuracy: 0.8433 - 1s/epoch - 4ms/step Epoch 90/100 283/283 - 1s - loss: 0.4290 - accuracy: 0.8913 - val_loss: 0.9699 - val_accuracy: 0.8000 - 1s/epoch - 4ms/step Epoch 91/100 283/283 - 1s - loss: 0.1875 - accuracy: 0.9621 - val_loss: 1.0075 - val_accuracy: 0.8177 - 1s/epoch - 4ms/step Epoch 92/100 283/283 - 1s - loss: 0.1202 - accuracy: 0.9874 - val_loss: 0.8819 - val_accuracy: 0.8357 - 1s/epoch - 4ms/step Epoch 93/100 283/283 - 1s - loss: 0.0878 - accuracy: 0.9986 - val_loss: 0.9248 - val_accuracy: 0.8450 - 1s/epoch - 4ms/step Epoch 94/100 283/283 - 1s - loss: 0.0786 - accuracy: 0.9994 - val_loss: 0.8921 - val_accuracy: 0.8533 - 1s/epoch - 4ms/step Epoch 95/100 283/283 - 1s - loss: 0.0720 - accuracy: 0.9997 - val_loss: 0.8870 - val_accuracy: 0.8563 - 1s/epoch - 4ms/step Epoch 96/100 283/283 - 1s - loss: 0.0663 - accuracy: 1.0000 - val_loss: 0.8915 - val_accuracy: 0.8570 - 1s/epoch - 4ms/step Epoch 97/100 283/283 - 1s - loss: 0.0612 - accuracy: 1.0000 - val_loss: 0.8999 - val_accuracy: 0.8523 - 1s/epoch - 4ms/step Epoch 98/100 283/283 - 1s - loss: 0.0569 - accuracy: 1.0000 - val_loss: 0.8977 - val_accuracy: 0.8527 - 1s/epoch - 4ms/step Epoch 99/100 283/283 - 1s - loss: 0.4509 - accuracy: 0.8892 - val_loss: 1.1631 - val_accuracy: 0.7547 - 1s/epoch - 4ms/step Epoch 100/100 283/283 - 1s - loss: 0.1816 - accuracy: 0.9638 - val_loss: 1.0630 - val_accuracy: 0.8053 - 1s/epoch - 4ms/step 94/94 - 0s - loss: 1.0630 - accuracy: 0.8053 - 196ms/epoch - 2ms/step Baseline Error: 19.47%
As seen from the graph, the model's improvement starts to slow after around 30 epochs, so we can try a learning rate scheduler to see whether that improves performance. As there is still some overfitting, we can also add dropout to reduce it.
As the 31x31 images are simple, we will try not to overcomplicate the model for now and look for easier solutions to this problem.
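A learning-rate schedule, if we do try one later, could be attached through the LearningRateScheduler callback imported at the top of the notebook; the sketch below uses illustrative, untuned values and is not code that produced any of the results shown here.

```python
# Sketch only: keep the initial learning rate for the first 30 epochs,
# then halve it every 20 epochs; these values are illustrative, not tuned.
def example_lr_schedule(epoch, lr):
    if epoch < 30:
        return lr
    if epoch % 20 == 0:
        return lr * 0.5
    return lr

example_lr_callback = LearningRateScheduler(example_lr_schedule, verbose=0)
# It would be passed to training via model.fit(..., callbacks=[example_lr_callback])
```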
#First test with dropout
model_31new = models.Sequential([
layers.Conv2D(32, (3, 3), activation='relu', input_shape=(31, 31, 1)),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.MaxPooling2D((2, 2)),
layers.Flatten(),
layers.Dense(64, activation='relu', kernel_regularizer=regularizers.l2(0.001)),
layers.Dropout(0.5), # Add dropout with dropout rate of 0.5
layers.Dense(15, activation='softmax')
])
model_31new.compile(optimizer='adam',
loss= 'categorical_crossentropy',
metrics=['accuracy'])
history = model_31new.fit(X_train31, train_labels, validation_data=(X_val31, validation_labels),
epochs=100, batch_size=32, verbose=2, class_weight = class_weight)
scores = model_31new.evaluate(X_val31, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
df31.loc[len(df31)] = ['RegularizedDOModel', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
Epoch 1/100 283/283 - 3s - loss: 3.0926 - accuracy: 0.1304 - val_loss: 2.6872 - val_accuracy: 0.1233 - 3s/epoch - 11ms/step Epoch 2/100 283/283 - 1s - loss: 2.6139 - accuracy: 0.2318 - val_loss: 2.3090 - val_accuracy: 0.2317 - 1s/epoch - 4ms/step Epoch 3/100 283/283 - 1s - loss: 2.3558 - accuracy: 0.2888 - val_loss: 2.2681 - val_accuracy: 0.2803 - 1s/epoch - 4ms/step Epoch 4/100 283/283 - 1s - loss: 2.1560 - accuracy: 0.3432 - val_loss: 1.9635 - val_accuracy: 0.3587 - 1s/epoch - 4ms/step Epoch 5/100 283/283 - 1s - loss: 2.0298 - accuracy: 0.3739 - val_loss: 1.8783 - val_accuracy: 0.3883 - 1s/epoch - 4ms/step Epoch 6/100 283/283 - 1s - loss: 1.9356 - accuracy: 0.3953 - val_loss: 1.7106 - val_accuracy: 0.4420 - 1s/epoch - 4ms/step Epoch 7/100 283/283 - 1s - loss: 1.8030 - accuracy: 0.4366 - val_loss: 1.7225 - val_accuracy: 0.4383 - 1s/epoch - 4ms/step Epoch 8/100 283/283 - 1s - loss: 1.7490 - accuracy: 0.4527 - val_loss: 1.6495 - val_accuracy: 0.4647 - 1s/epoch - 4ms/step Epoch 9/100 283/283 - 1s - loss: 1.6968 - accuracy: 0.4662 - val_loss: 1.6096 - val_accuracy: 0.4760 - 1s/epoch - 4ms/step Epoch 10/100 283/283 - 1s - loss: 1.5971 - accuracy: 0.4955 - val_loss: 1.4843 - val_accuracy: 0.5170 - 1s/epoch - 4ms/step Epoch 11/100 283/283 - 1s - loss: 1.5600 - accuracy: 0.5047 - val_loss: 1.4185 - val_accuracy: 0.5483 - 1s/epoch - 4ms/step Epoch 12/100 283/283 - 1s - loss: 1.5242 - accuracy: 0.5207 - val_loss: 1.4550 - val_accuracy: 0.5307 - 1s/epoch - 4ms/step Epoch 13/100 283/283 - 1s - loss: 1.4785 - accuracy: 0.5268 - val_loss: 1.3157 - val_accuracy: 0.5787 - 1s/epoch - 4ms/step Epoch 14/100 283/283 - 1s - loss: 1.4198 - accuracy: 0.5457 - val_loss: 1.4691 - val_accuracy: 0.5463 - 1s/epoch - 4ms/step Epoch 15/100 283/283 - 1s - loss: 1.3855 - accuracy: 0.5541 - val_loss: 1.2320 - val_accuracy: 0.6107 - 1s/epoch - 4ms/step Epoch 16/100 283/283 - 1s - loss: 1.3885 - accuracy: 0.5575 - val_loss: 1.3340 - val_accuracy: 0.5907 - 1s/epoch - 4ms/step Epoch 17/100 283/283 - 1s - loss: 1.3450 - accuracy: 0.5697 - val_loss: 1.1889 - val_accuracy: 0.6267 - 1s/epoch - 4ms/step Epoch 18/100 283/283 - 1s - loss: 1.3016 - accuracy: 0.5881 - val_loss: 1.2507 - val_accuracy: 0.6163 - 1s/epoch - 4ms/step Epoch 19/100 283/283 - 1s - loss: 1.2828 - accuracy: 0.5893 - val_loss: 1.2349 - val_accuracy: 0.6093 - 1s/epoch - 4ms/step Epoch 20/100 283/283 - 1s - loss: 1.2424 - accuracy: 0.5951 - val_loss: 1.1769 - val_accuracy: 0.6363 - 1s/epoch - 4ms/step Epoch 21/100 283/283 - 1s - loss: 1.2252 - accuracy: 0.6051 - val_loss: 1.2000 - val_accuracy: 0.6190 - 1s/epoch - 4ms/step Epoch 22/100 283/283 - 1s - loss: 1.1951 - accuracy: 0.6180 - val_loss: 1.1418 - val_accuracy: 0.6477 - 1s/epoch - 4ms/step Epoch 23/100 283/283 - 1s - loss: 1.1619 - accuracy: 0.6303 - val_loss: 1.2412 - val_accuracy: 0.6187 - 1s/epoch - 4ms/step Epoch 24/100 283/283 - 1s - loss: 1.1772 - accuracy: 0.6273 - val_loss: 1.2253 - val_accuracy: 0.6217 - 1s/epoch - 4ms/step Epoch 25/100 283/283 - 1s - loss: 1.1274 - accuracy: 0.6352 - val_loss: 1.0877 - val_accuracy: 0.6797 - 1s/epoch - 4ms/step Epoch 26/100 283/283 - 1s - loss: 1.1090 - accuracy: 0.6379 - val_loss: 1.0402 - val_accuracy: 0.6817 - 1s/epoch - 4ms/step Epoch 27/100 283/283 - 1s - loss: 1.0811 - accuracy: 0.6562 - val_loss: 1.1028 - val_accuracy: 0.6670 - 1s/epoch - 4ms/step Epoch 28/100 283/283 - 1s - loss: 1.0673 - accuracy: 0.6564 - val_loss: 1.1353 - val_accuracy: 0.6457 - 1s/epoch - 4ms/step Epoch 29/100 283/283 - 1s - loss: 1.0493 - accuracy: 0.6556 - val_loss: 1.0969 - 
val_accuracy: 0.6747 - 1s/epoch - 4ms/step Epoch 30/100 283/283 - 1s - loss: 1.0140 - accuracy: 0.6688 - val_loss: 1.1082 - val_accuracy: 0.6630 - 1s/epoch - 5ms/step Epoch 31/100 283/283 - 1s - loss: 0.9977 - accuracy: 0.6778 - val_loss: 1.0445 - val_accuracy: 0.6903 - 1s/epoch - 4ms/step Epoch 32/100 283/283 - 1s - loss: 0.9672 - accuracy: 0.6871 - val_loss: 1.0117 - val_accuracy: 0.6937 - 1s/epoch - 4ms/step Epoch 33/100 283/283 - 1s - loss: 0.9646 - accuracy: 0.6921 - val_loss: 1.0493 - val_accuracy: 0.6977 - 1s/epoch - 4ms/step Epoch 34/100 283/283 - 1s - loss: 0.9615 - accuracy: 0.6951 - val_loss: 1.1128 - val_accuracy: 0.6733 - 1s/epoch - 4ms/step Epoch 35/100 283/283 - 1s - loss: 0.9295 - accuracy: 0.7015 - val_loss: 1.0132 - val_accuracy: 0.7107 - 1s/epoch - 4ms/step Epoch 36/100 283/283 - 1s - loss: 0.9292 - accuracy: 0.7010 - val_loss: 0.9217 - val_accuracy: 0.7257 - 1s/epoch - 4ms/step Epoch 37/100 283/283 - 1s - loss: 0.9021 - accuracy: 0.7097 - val_loss: 1.0470 - val_accuracy: 0.7023 - 1s/epoch - 4ms/step Epoch 38/100 283/283 - 1s - loss: 0.8687 - accuracy: 0.7198 - val_loss: 1.1369 - val_accuracy: 0.6720 - 1s/epoch - 4ms/step Epoch 39/100 283/283 - 1s - loss: 0.8561 - accuracy: 0.7165 - val_loss: 0.9366 - val_accuracy: 0.7327 - 1s/epoch - 4ms/step Epoch 40/100 283/283 - 1s - loss: 0.8414 - accuracy: 0.7255 - val_loss: 1.0177 - val_accuracy: 0.7077 - 1s/epoch - 4ms/step Epoch 41/100 283/283 - 1s - loss: 0.8514 - accuracy: 0.7288 - val_loss: 1.0834 - val_accuracy: 0.7003 - 1s/epoch - 4ms/step Epoch 42/100 283/283 - 1s - loss: 0.8504 - accuracy: 0.7234 - val_loss: 0.9322 - val_accuracy: 0.7330 - 1s/epoch - 4ms/step Epoch 43/100 283/283 - 1s - loss: 0.8183 - accuracy: 0.7267 - val_loss: 0.9006 - val_accuracy: 0.7480 - 1s/epoch - 4ms/step Epoch 44/100 283/283 - 1s - loss: 0.7856 - accuracy: 0.7473 - val_loss: 1.0261 - val_accuracy: 0.7167 - 1s/epoch - 4ms/step Epoch 45/100 283/283 - 1s - loss: 0.8011 - accuracy: 0.7429 - val_loss: 0.9595 - val_accuracy: 0.7387 - 1s/epoch - 4ms/step Epoch 46/100 283/283 - 1s - loss: 0.7876 - accuracy: 0.7430 - val_loss: 1.0160 - val_accuracy: 0.7213 - 1s/epoch - 4ms/step Epoch 47/100 283/283 - 1s - loss: 0.7545 - accuracy: 0.7552 - val_loss: 0.9526 - val_accuracy: 0.7383 - 1s/epoch - 4ms/step Epoch 48/100 283/283 - 1s - loss: 0.7515 - accuracy: 0.7607 - val_loss: 0.9214 - val_accuracy: 0.7490 - 1s/epoch - 4ms/step Epoch 49/100 283/283 - 1s - loss: 0.7211 - accuracy: 0.7654 - val_loss: 0.8697 - val_accuracy: 0.7640 - 1s/epoch - 4ms/step Epoch 50/100 283/283 - 1s - loss: 0.7012 - accuracy: 0.7719 - val_loss: 1.0443 - val_accuracy: 0.7317 - 1s/epoch - 4ms/step Epoch 51/100 283/283 - 1s - loss: 0.7155 - accuracy: 0.7688 - val_loss: 0.8909 - val_accuracy: 0.7627 - 1s/epoch - 4ms/step Epoch 52/100 283/283 - 1s - loss: 0.6987 - accuracy: 0.7741 - val_loss: 0.8303 - val_accuracy: 0.7783 - 1s/epoch - 4ms/step Epoch 53/100 283/283 - 1s - loss: 0.6922 - accuracy: 0.7714 - val_loss: 0.9513 - val_accuracy: 0.7520 - 1s/epoch - 4ms/step Epoch 54/100 283/283 - 1s - loss: 0.6978 - accuracy: 0.7737 - val_loss: 0.9404 - val_accuracy: 0.7550 - 1s/epoch - 4ms/step Epoch 55/100 283/283 - 1s - loss: 0.6735 - accuracy: 0.7852 - val_loss: 1.1237 - val_accuracy: 0.7180 - 1s/epoch - 4ms/step Epoch 56/100 283/283 - 1s - loss: 0.6302 - accuracy: 0.7966 - val_loss: 0.8594 - val_accuracy: 0.7753 - 1s/epoch - 4ms/step Epoch 57/100 283/283 - 1s - loss: 0.6673 - accuracy: 0.7863 - val_loss: 0.8773 - val_accuracy: 0.7753 - 1s/epoch - 4ms/step Epoch 58/100 283/283 - 1s - loss: 
0.6471 - accuracy: 0.7890 - val_loss: 0.9261 - val_accuracy: 0.7530 - 1s/epoch - 4ms/step Epoch 59/100 283/283 - 1s - loss: 0.6455 - accuracy: 0.7939 - val_loss: 1.0639 - val_accuracy: 0.7320 - 1s/epoch - 4ms/step Epoch 60/100 283/283 - 1s - loss: 0.6480 - accuracy: 0.7905 - val_loss: 1.0014 - val_accuracy: 0.7507 - 1s/epoch - 4ms/step Epoch 61/100 283/283 - 1s - loss: 0.6005 - accuracy: 0.8058 - val_loss: 0.9098 - val_accuracy: 0.7720 - 1s/epoch - 4ms/step Epoch 62/100 283/283 - 1s - loss: 0.6057 - accuracy: 0.8063 - val_loss: 0.9143 - val_accuracy: 0.7720 - 1s/epoch - 4ms/step Epoch 63/100 283/283 - 1s - loss: 0.6064 - accuracy: 0.8022 - val_loss: 0.8476 - val_accuracy: 0.7910 - 1s/epoch - 4ms/step Epoch 64/100 283/283 - 1s - loss: 0.5839 - accuracy: 0.8138 - val_loss: 0.9319 - val_accuracy: 0.7783 - 1s/epoch - 4ms/step Epoch 65/100 283/283 - 1s - loss: 0.5750 - accuracy: 0.8166 - val_loss: 1.0177 - val_accuracy: 0.7583 - 1s/epoch - 4ms/step Epoch 66/100 283/283 - 1s - loss: 0.5767 - accuracy: 0.8207 - val_loss: 0.8622 - val_accuracy: 0.7900 - 1s/epoch - 4ms/step Epoch 67/100 283/283 - 1s - loss: 0.5550 - accuracy: 0.8196 - val_loss: 0.9330 - val_accuracy: 0.7763 - 1s/epoch - 4ms/step Epoch 68/100 283/283 - 1s - loss: 0.5714 - accuracy: 0.8192 - val_loss: 0.9594 - val_accuracy: 0.7690 - 1s/epoch - 4ms/step Epoch 69/100 283/283 - 1s - loss: 0.5510 - accuracy: 0.8209 - val_loss: 0.8871 - val_accuracy: 0.7787 - 1s/epoch - 4ms/step Epoch 70/100 283/283 - 1s - loss: 0.5407 - accuracy: 0.8251 - val_loss: 1.2404 - val_accuracy: 0.7157 - 1s/epoch - 4ms/step Epoch 71/100 283/283 - 1s - loss: 0.6426 - accuracy: 0.8011 - val_loss: 1.0023 - val_accuracy: 0.7730 - 1s/epoch - 4ms/step Epoch 72/100 283/283 - 1s - loss: 0.5180 - accuracy: 0.8343 - val_loss: 0.8859 - val_accuracy: 0.7833 - 1s/epoch - 4ms/step Epoch 73/100 283/283 - 1s - loss: 0.5204 - accuracy: 0.8336 - val_loss: 0.9411 - val_accuracy: 0.7783 - 1s/epoch - 4ms/step Epoch 74/100 283/283 - 1s - loss: 0.4945 - accuracy: 0.8405 - val_loss: 0.9288 - val_accuracy: 0.7840 - 1s/epoch - 4ms/step Epoch 75/100 283/283 - 1s - loss: 0.5020 - accuracy: 0.8424 - val_loss: 0.9603 - val_accuracy: 0.7800 - 1s/epoch - 4ms/step Epoch 76/100 283/283 - 1s - loss: 0.5003 - accuracy: 0.8437 - val_loss: 0.9257 - val_accuracy: 0.7910 - 1s/epoch - 4ms/step Epoch 77/100 283/283 - 1s - loss: 0.5382 - accuracy: 0.8320 - val_loss: 0.9512 - val_accuracy: 0.7727 - 1s/epoch - 4ms/step Epoch 78/100 283/283 - 1s - loss: 0.4734 - accuracy: 0.8468 - val_loss: 1.1122 - val_accuracy: 0.7623 - 1s/epoch - 4ms/step Epoch 79/100 283/283 - 1s - loss: 0.5029 - accuracy: 0.8415 - val_loss: 0.9442 - val_accuracy: 0.7817 - 1s/epoch - 4ms/step Epoch 80/100 283/283 - 1s - loss: 0.5069 - accuracy: 0.8411 - val_loss: 0.9137 - val_accuracy: 0.7953 - 1s/epoch - 4ms/step Epoch 81/100 283/283 - 1s - loss: 0.4824 - accuracy: 0.8494 - val_loss: 0.8661 - val_accuracy: 0.7980 - 1s/epoch - 4ms/step Epoch 82/100 283/283 - 1s - loss: 0.4738 - accuracy: 0.8504 - val_loss: 0.9566 - val_accuracy: 0.7937 - 1s/epoch - 4ms/step Epoch 83/100 283/283 - 1s - loss: 0.4623 - accuracy: 0.8535 - val_loss: 1.2196 - val_accuracy: 0.7447 - 1s/epoch - 4ms/step Epoch 84/100 283/283 - 1s - loss: 0.4770 - accuracy: 0.8491 - val_loss: 0.8735 - val_accuracy: 0.8027 - 1s/epoch - 4ms/step Epoch 85/100 283/283 - 1s - loss: 0.4754 - accuracy: 0.8537 - val_loss: 0.9473 - val_accuracy: 0.7897 - 1s/epoch - 4ms/step Epoch 86/100 283/283 - 1s - loss: 0.4673 - accuracy: 0.8576 - val_loss: 1.1136 - val_accuracy: 0.7593 - 1s/epoch - 
4ms/step Epoch 87/100 283/283 - 1s - loss: 0.4497 - accuracy: 0.8571 - val_loss: 0.9502 - val_accuracy: 0.7930 - 1s/epoch - 4ms/step Epoch 88/100 283/283 - 1s - loss: 0.4612 - accuracy: 0.8535 - val_loss: 1.0115 - val_accuracy: 0.7653 - 1s/epoch - 4ms/step Epoch 89/100 283/283 - 1s - loss: 0.4450 - accuracy: 0.8684 - val_loss: 1.0254 - val_accuracy: 0.7927 - 1s/epoch - 4ms/step Epoch 90/100 283/283 - 1s - loss: 0.4521 - accuracy: 0.8609 - val_loss: 1.0812 - val_accuracy: 0.7770 - 1s/epoch - 4ms/step Epoch 91/100 283/283 - 1s - loss: 0.4239 - accuracy: 0.8698 - val_loss: 0.9747 - val_accuracy: 0.8037 - 1s/epoch - 4ms/step Epoch 92/100 283/283 - 1s - loss: 0.4473 - accuracy: 0.8626 - val_loss: 1.2167 - val_accuracy: 0.7530 - 1s/epoch - 4ms/step Epoch 93/100 283/283 - 1s - loss: 0.4190 - accuracy: 0.8698 - val_loss: 1.0829 - val_accuracy: 0.7883 - 1s/epoch - 4ms/step Epoch 94/100 283/283 - 1s - loss: 0.4217 - accuracy: 0.8694 - val_loss: 0.9980 - val_accuracy: 0.8003 - 1s/epoch - 4ms/step Epoch 95/100 283/283 - 1s - loss: 0.4150 - accuracy: 0.8718 - val_loss: 1.0828 - val_accuracy: 0.7860 - 1s/epoch - 4ms/step Epoch 96/100 283/283 - 1s - loss: 0.3943 - accuracy: 0.8833 - val_loss: 0.9102 - val_accuracy: 0.8160 - 1s/epoch - 5ms/step Epoch 97/100 283/283 - 1s - loss: 0.4116 - accuracy: 0.8705 - val_loss: 0.9355 - val_accuracy: 0.7990 - 1s/epoch - 5ms/step Epoch 98/100 283/283 - 1s - loss: 0.3878 - accuracy: 0.8784 - val_loss: 0.9674 - val_accuracy: 0.8057 - 1s/epoch - 4ms/step Epoch 99/100 283/283 - 1s - loss: 0.4008 - accuracy: 0.8795 - val_loss: 1.2376 - val_accuracy: 0.7617 - 1s/epoch - 4ms/step Epoch 100/100 283/283 - 1s - loss: 0.4130 - accuracy: 0.8704 - val_loss: 1.0561 - val_accuracy: 0.7900 - 1s/epoch - 4ms/step 94/94 - 0s - loss: 1.0561 - accuracy: 0.7900 - 218ms/epoch - 2ms/step Baseline Error: 21.00%
We can see that the overfitting problem is now essentially gone. We can try increasing the number of epochs to let the model converge fully and see whether that yields better results.
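As a side note, rather than fixing a larger epoch budget up front, one could let an EarlyStopping callback (already imported above) end training once the validation loss stops improving. The sketch below is only an illustration with an assumed patience value; it was not run in this notebook.
early_stop = EarlyStopping(monitor='val_loss',          # watch validation loss
                           patience=20,                 # assumed patience, tune as needed
                           restore_best_weights=True)   # roll back to the best epoch
# It would then simply be passed to fit(), e.g. model.fit(..., callbacks=[early_stop])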
#Same model and other params, change epochs
model_31new = models.Sequential([
    layers.Conv2D(32, (3, 3), activation='relu', input_shape=(31, 31, 1)),
    layers.MaxPooling2D((2, 2)),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.MaxPooling2D((2, 2)),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.MaxPooling2D((2, 2)),
    layers.Flatten(),
    layers.Dense(64, activation='relu', kernel_regularizer=regularizers.l2(0.001)),
    layers.Dropout(0.5),
    layers.Dense(15, activation='softmax')
])
model_31new.compile(optimizer='adam',
                    loss='categorical_crossentropy',
                    metrics=['accuracy'])
history = model_31new.fit(X_train31, train_labels, validation_data=(X_val31, validation_labels),
                          epochs=300, batch_size=32, verbose=2, class_weight=class_weight)  #adjust epochs to 300
scores = model_31new.evaluate(X_val31, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
df31.loc[len(df31)] = ['RegularizedDOModel2', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
Epoch 1/300 283/283 - 3s - loss: 3.1348 - accuracy: 0.1166 - val_loss: 2.8202 - val_accuracy: 0.0950 - 3s/epoch - 12ms/step Epoch 2/300 283/283 - 1s - loss: 2.7163 - accuracy: 0.2100 - val_loss: 2.4495 - val_accuracy: 0.1860 - 1s/epoch - 4ms/step Epoch 3/300 283/283 - 1s - loss: 2.4611 - accuracy: 0.2524 - val_loss: 2.3568 - val_accuracy: 0.2273 - 1s/epoch - 4ms/step Epoch 4/300 283/283 - 1s - loss: 2.3119 - accuracy: 0.2952 - val_loss: 2.0924 - val_accuracy: 0.3103 - 1s/epoch - 4ms/step Epoch 5/300 283/283 - 1s - loss: 2.1461 - accuracy: 0.3343 - val_loss: 1.9254 - val_accuracy: 0.3757 - 1s/epoch - 4ms/step Epoch 6/300 283/283 - 1s - loss: 2.0484 - accuracy: 0.3665 - val_loss: 1.8364 - val_accuracy: 0.4033 - 1s/epoch - 4ms/step Epoch 7/300 283/283 - 1s - loss: 1.9275 - accuracy: 0.4026 - val_loss: 1.8395 - val_accuracy: 0.4000 - 1s/epoch - 4ms/step Epoch 8/300 283/283 - 1s - loss: 1.8391 - accuracy: 0.4199 - val_loss: 1.7678 - val_accuracy: 0.4210 - 1s/epoch - 4ms/step Epoch 9/300 283/283 - 1s - loss: 1.7720 - accuracy: 0.4393 - val_loss: 1.6541 - val_accuracy: 0.4637 - 1s/epoch - 4ms/step Epoch 10/300 283/283 - 1s - loss: 1.6887 - accuracy: 0.4658 - val_loss: 1.7132 - val_accuracy: 0.4350 - 1s/epoch - 4ms/step Epoch 11/300 283/283 - 1s - loss: 1.6333 - accuracy: 0.4795 - val_loss: 1.6824 - val_accuracy: 0.4500 - 1s/epoch - 4ms/step Epoch 12/300 283/283 - 1s - loss: 1.5633 - accuracy: 0.4966 - val_loss: 1.4250 - val_accuracy: 0.5480 - 1s/epoch - 4ms/step Epoch 13/300 283/283 - 1s - loss: 1.5111 - accuracy: 0.5116 - val_loss: 1.3878 - val_accuracy: 0.5570 - 1s/epoch - 4ms/step Epoch 14/300 283/283 - 1s - loss: 1.4515 - accuracy: 0.5364 - val_loss: 1.4584 - val_accuracy: 0.5327 - 1s/epoch - 4ms/step Epoch 15/300 283/283 - 1s - loss: 1.4485 - accuracy: 0.5411 - val_loss: 1.3013 - val_accuracy: 0.5833 - 1s/epoch - 4ms/step Epoch 16/300 283/283 - 1s - loss: 1.3917 - accuracy: 0.5503 - val_loss: 1.2499 - val_accuracy: 0.6227 - 1s/epoch - 4ms/step Epoch 17/300 283/283 - 1s - loss: 1.3694 - accuracy: 0.5623 - val_loss: 1.2354 - val_accuracy: 0.5967 - 1s/epoch - 4ms/step Epoch 18/300 283/283 - 1s - loss: 1.3375 - accuracy: 0.5714 - val_loss: 1.2662 - val_accuracy: 0.5993 - 1s/epoch - 4ms/step Epoch 19/300 283/283 - 1s - loss: 1.3021 - accuracy: 0.5776 - val_loss: 1.2725 - val_accuracy: 0.5897 - 1s/epoch - 4ms/step Epoch 20/300 283/283 - 1s - loss: 1.2703 - accuracy: 0.5846 - val_loss: 1.1179 - val_accuracy: 0.6453 - 1s/epoch - 4ms/step Epoch 21/300 283/283 - 1s - loss: 1.2592 - accuracy: 0.5927 - val_loss: 1.1403 - val_accuracy: 0.6433 - 1s/epoch - 4ms/step Epoch 22/300 283/283 - 1s - loss: 1.2724 - accuracy: 0.5857 - val_loss: 1.1366 - val_accuracy: 0.6450 - 1s/epoch - 4ms/step Epoch 23/300 283/283 - 1s - loss: 1.2330 - accuracy: 0.6005 - val_loss: 1.1607 - val_accuracy: 0.6370 - 1s/epoch - 4ms/step Epoch 24/300 283/283 - 1s - loss: 1.1852 - accuracy: 0.6101 - val_loss: 1.0979 - val_accuracy: 0.6563 - 1s/epoch - 4ms/step Epoch 25/300 283/283 - 1s - loss: 1.1639 - accuracy: 0.6164 - val_loss: 1.0875 - val_accuracy: 0.6617 - 1s/epoch - 4ms/step Epoch 26/300 283/283 - 1s - loss: 1.1705 - accuracy: 0.6111 - val_loss: 1.0990 - val_accuracy: 0.6587 - 1s/epoch - 4ms/step Epoch 27/300 283/283 - 1s - loss: 1.1276 - accuracy: 0.6284 - val_loss: 1.0418 - val_accuracy: 0.6770 - 1s/epoch - 4ms/step Epoch 28/300 283/283 - 1s - loss: 1.1303 - accuracy: 0.6296 - val_loss: 1.0353 - val_accuracy: 0.6797 - 1s/epoch - 4ms/step Epoch 29/300 283/283 - 1s - loss: 1.0934 - accuracy: 0.6423 - val_loss: 1.0912 - 
val_accuracy: 0.6637 - 1s/epoch - 4ms/step Epoch 30/300 283/283 - 1s - loss: 1.0921 - accuracy: 0.6392 - val_loss: 1.0040 - val_accuracy: 0.6890 - 1s/epoch - 4ms/step Epoch 31/300 283/283 - 1s - loss: 1.0624 - accuracy: 0.6535 - val_loss: 1.0834 - val_accuracy: 0.6577 - 1s/epoch - 4ms/step Epoch 32/300 283/283 - 1s - loss: 1.0614 - accuracy: 0.6520 - val_loss: 0.9979 - val_accuracy: 0.6837 - 1s/epoch - 4ms/step Epoch 33/300 283/283 - 1s - loss: 1.0240 - accuracy: 0.6624 - val_loss: 0.9607 - val_accuracy: 0.7030 - 1s/epoch - 4ms/step Epoch 34/300 283/283 - 1s - loss: 1.0126 - accuracy: 0.6607 - val_loss: 1.0190 - val_accuracy: 0.6913 - 1s/epoch - 4ms/step Epoch 35/300 283/283 - 1s - loss: 1.0271 - accuracy: 0.6661 - val_loss: 1.0449 - val_accuracy: 0.6840 - 1s/epoch - 4ms/step Epoch 36/300 283/283 - 1s - loss: 1.0016 - accuracy: 0.6638 - val_loss: 1.0300 - val_accuracy: 0.6767 - 1s/epoch - 4ms/step Epoch 37/300 283/283 - 1s - loss: 0.9630 - accuracy: 0.6807 - val_loss: 1.1201 - val_accuracy: 0.6643 - 1s/epoch - 4ms/step Epoch 38/300 283/283 - 1s - loss: 0.9567 - accuracy: 0.6794 - val_loss: 1.0279 - val_accuracy: 0.6857 - 1s/epoch - 4ms/step Epoch 39/300 283/283 - 1s - loss: 0.9528 - accuracy: 0.6848 - val_loss: 1.0361 - val_accuracy: 0.6853 - 1s/epoch - 4ms/step Epoch 40/300 283/283 - 1s - loss: 0.9329 - accuracy: 0.6897 - val_loss: 1.1854 - val_accuracy: 0.6520 - 1s/epoch - 4ms/step Epoch 41/300 283/283 - 1s - loss: 0.9408 - accuracy: 0.6869 - val_loss: 0.9601 - val_accuracy: 0.7037 - 1s/epoch - 4ms/step Epoch 42/300 283/283 - 1s - loss: 0.9164 - accuracy: 0.6937 - val_loss: 0.9108 - val_accuracy: 0.7283 - 1s/epoch - 4ms/step Epoch 43/300 283/283 - 1s - loss: 0.9281 - accuracy: 0.6951 - val_loss: 0.9048 - val_accuracy: 0.7320 - 1s/epoch - 5ms/step Epoch 44/300 283/283 - 1s - loss: 0.8990 - accuracy: 0.7039 - val_loss: 1.0257 - val_accuracy: 0.6913 - 1s/epoch - 4ms/step Epoch 45/300 283/283 - 1s - loss: 0.8816 - accuracy: 0.7080 - val_loss: 0.9517 - val_accuracy: 0.7160 - 1s/epoch - 4ms/step Epoch 46/300 283/283 - 1s - loss: 0.8914 - accuracy: 0.6989 - val_loss: 0.9446 - val_accuracy: 0.7253 - 1s/epoch - 4ms/step Epoch 47/300 283/283 - 1s - loss: 0.8497 - accuracy: 0.7150 - val_loss: 0.8555 - val_accuracy: 0.7487 - 1s/epoch - 4ms/step Epoch 48/300 283/283 - 1s - loss: 0.8242 - accuracy: 0.7214 - val_loss: 0.8457 - val_accuracy: 0.7503 - 1s/epoch - 4ms/step Epoch 49/300 283/283 - 1s - loss: 0.8518 - accuracy: 0.7205 - val_loss: 0.8802 - val_accuracy: 0.7477 - 1s/epoch - 4ms/step Epoch 50/300 283/283 - 1s - loss: 0.8067 - accuracy: 0.7280 - val_loss: 0.8763 - val_accuracy: 0.7470 - 1s/epoch - 4ms/step Epoch 51/300 283/283 - 1s - loss: 0.8122 - accuracy: 0.7293 - val_loss: 0.8521 - val_accuracy: 0.7550 - 1s/epoch - 4ms/step Epoch 52/300 283/283 - 1s - loss: 0.7702 - accuracy: 0.7411 - val_loss: 1.0250 - val_accuracy: 0.6993 - 1s/epoch - 4ms/step Epoch 53/300 283/283 - 1s - loss: 0.8156 - accuracy: 0.7266 - val_loss: 0.8267 - val_accuracy: 0.7607 - 1s/epoch - 4ms/step Epoch 54/300 283/283 - 1s - loss: 0.7676 - accuracy: 0.7432 - val_loss: 0.8517 - val_accuracy: 0.7473 - 1s/epoch - 4ms/step Epoch 55/300 283/283 - 1s - loss: 0.7671 - accuracy: 0.7466 - val_loss: 0.8345 - val_accuracy: 0.7653 - 1s/epoch - 4ms/step Epoch 56/300 283/283 - 1s - loss: 0.7632 - accuracy: 0.7512 - val_loss: 0.8147 - val_accuracy: 0.7593 - 1s/epoch - 4ms/step Epoch 57/300 283/283 - 1s - loss: 0.7761 - accuracy: 0.7410 - val_loss: 0.8169 - val_accuracy: 0.7650 - 1s/epoch - 4ms/step Epoch 58/300 283/283 - 1s - loss: 
0.7628 - accuracy: 0.7453 - val_loss: 0.8814 - val_accuracy: 0.7400 - 1s/epoch - 5ms/step Epoch 59/300 283/283 - 1s - loss: 0.7283 - accuracy: 0.7551 - val_loss: 0.7834 - val_accuracy: 0.7730 - 1s/epoch - 4ms/step Epoch 60/300 283/283 - 1s - loss: 0.7267 - accuracy: 0.7579 - val_loss: 0.7850 - val_accuracy: 0.7760 - 1s/epoch - 4ms/step Epoch 61/300 283/283 - 1s - loss: 0.7231 - accuracy: 0.7581 - val_loss: 0.9390 - val_accuracy: 0.7293 - 1s/epoch - 4ms/step Epoch 62/300 283/283 - 1s - loss: 0.7389 - accuracy: 0.7568 - val_loss: 0.8118 - val_accuracy: 0.7700 - 1s/epoch - 4ms/step Epoch 63/300 283/283 - 1s - loss: 0.7223 - accuracy: 0.7584 - val_loss: 0.8551 - val_accuracy: 0.7607 - 1s/epoch - 4ms/step Epoch 64/300 283/283 - 1s - loss: 0.7129 - accuracy: 0.7628 - val_loss: 0.9686 - val_accuracy: 0.7383 - 1s/epoch - 4ms/step Epoch 65/300 283/283 - 1s - loss: 0.6648 - accuracy: 0.7784 - val_loss: 0.8030 - val_accuracy: 0.7790 - 1s/epoch - 4ms/step Epoch 66/300 283/283 - 1s - loss: 0.6962 - accuracy: 0.7703 - val_loss: 1.0090 - val_accuracy: 0.7280 - 1s/epoch - 4ms/step Epoch 67/300 283/283 - 1s - loss: 0.7373 - accuracy: 0.7565 - val_loss: 0.9441 - val_accuracy: 0.7287 - 1s/epoch - 4ms/step Epoch 68/300 283/283 - 1s - loss: 0.6657 - accuracy: 0.7764 - val_loss: 0.8653 - val_accuracy: 0.7677 - 1s/epoch - 4ms/step Epoch 69/300 283/283 - 1s - loss: 0.6534 - accuracy: 0.7829 - val_loss: 0.8783 - val_accuracy: 0.7440 - 1s/epoch - 4ms/step Epoch 70/300 283/283 - 1s - loss: 0.6903 - accuracy: 0.7650 - val_loss: 0.7778 - val_accuracy: 0.7823 - 1s/epoch - 4ms/step Epoch 71/300 283/283 - 1s - loss: 0.6664 - accuracy: 0.7779 - val_loss: 0.8712 - val_accuracy: 0.7657 - 1s/epoch - 4ms/step Epoch 72/300 283/283 - 1s - loss: 0.6343 - accuracy: 0.7832 - val_loss: 0.9044 - val_accuracy: 0.7627 - 1s/epoch - 4ms/step Epoch 73/300 283/283 - 1s - loss: 0.6554 - accuracy: 0.7808 - val_loss: 0.8100 - val_accuracy: 0.7710 - 1s/epoch - 4ms/step Epoch 74/300 283/283 - 1s - loss: 0.6298 - accuracy: 0.7900 - val_loss: 0.8039 - val_accuracy: 0.7870 - 1s/epoch - 4ms/step Epoch 75/300 283/283 - 1s - loss: 0.6089 - accuracy: 0.7911 - val_loss: 0.9103 - val_accuracy: 0.7493 - 1s/epoch - 4ms/step Epoch 76/300 283/283 - 1s - loss: 0.6242 - accuracy: 0.7967 - val_loss: 0.9285 - val_accuracy: 0.7460 - 1s/epoch - 4ms/step Epoch 77/300 283/283 - 1s - loss: 0.6215 - accuracy: 0.7955 - val_loss: 0.8452 - val_accuracy: 0.7707 - 1s/epoch - 5ms/step Epoch 78/300 283/283 - 1s - loss: 0.6253 - accuracy: 0.7911 - val_loss: 0.7904 - val_accuracy: 0.7827 - 1s/epoch - 4ms/step Epoch 79/300 283/283 - 1s - loss: 0.6028 - accuracy: 0.8019 - val_loss: 0.7923 - val_accuracy: 0.7853 - 1s/epoch - 4ms/step Epoch 80/300 283/283 - 1s - loss: 0.6119 - accuracy: 0.7981 - val_loss: 0.7997 - val_accuracy: 0.7797 - 1s/epoch - 4ms/step Epoch 81/300 283/283 - 1s - loss: 0.6265 - accuracy: 0.7969 - val_loss: 0.7987 - val_accuracy: 0.7883 - 1s/epoch - 4ms/step Epoch 82/300 283/283 - 1s - loss: 0.5458 - accuracy: 0.8199 - val_loss: 0.7891 - val_accuracy: 0.7887 - 1s/epoch - 5ms/step Epoch 83/300 283/283 - 1s - loss: 0.5957 - accuracy: 0.8039 - val_loss: 0.9033 - val_accuracy: 0.7713 - 1s/epoch - 4ms/step Epoch 84/300 283/283 - 1s - loss: 0.5774 - accuracy: 0.8090 - val_loss: 0.8252 - val_accuracy: 0.7800 - 1s/epoch - 4ms/step Epoch 85/300 283/283 - 1s - loss: 0.5555 - accuracy: 0.8203 - val_loss: 0.9088 - val_accuracy: 0.7647 - 1s/epoch - 4ms/step Epoch 86/300 283/283 - 1s - loss: 0.5969 - accuracy: 0.8016 - val_loss: 0.9000 - val_accuracy: 0.7770 - 1s/epoch - 
4ms/step Epoch 87/300 283/283 - 1s - loss: 0.5765 - accuracy: 0.8095 - val_loss: 0.8581 - val_accuracy: 0.7893 - 1s/epoch - 4ms/step Epoch 88/300 283/283 - 1s - loss: 0.5542 - accuracy: 0.8166 - val_loss: 0.8227 - val_accuracy: 0.7920 - 1s/epoch - 4ms/step Epoch 89/300 283/283 - 1s - loss: 0.5789 - accuracy: 0.8125 - val_loss: 0.7923 - val_accuracy: 0.7910 - 1s/epoch - 4ms/step Epoch 90/300 283/283 - 1s - loss: 0.5193 - accuracy: 0.8281 - val_loss: 0.9567 - val_accuracy: 0.7560 - 1s/epoch - 4ms/step Epoch 91/300 283/283 - 1s - loss: 0.5308 - accuracy: 0.8220 - val_loss: 0.7564 - val_accuracy: 0.7913 - 1s/epoch - 4ms/step Epoch 92/300 283/283 - 1s - loss: 0.5584 - accuracy: 0.8223 - val_loss: 0.8272 - val_accuracy: 0.7893 - 1s/epoch - 4ms/step Epoch 93/300 283/283 - 1s - loss: 0.5164 - accuracy: 0.8295 - val_loss: 0.7645 - val_accuracy: 0.7923 - 1s/epoch - 4ms/step Epoch 94/300 283/283 - 1s - loss: 0.5482 - accuracy: 0.8200 - val_loss: 0.8663 - val_accuracy: 0.7807 - 1s/epoch - 4ms/step Epoch 95/300 283/283 - 1s - loss: 0.5271 - accuracy: 0.8311 - val_loss: 0.9183 - val_accuracy: 0.7790 - 1s/epoch - 4ms/step Epoch 96/300 283/283 - 1s - loss: 0.5320 - accuracy: 0.8265 - val_loss: 1.0680 - val_accuracy: 0.7627 - 1s/epoch - 4ms/step Epoch 97/300 283/283 - 1s - loss: 0.5274 - accuracy: 0.8213 - val_loss: 0.8292 - val_accuracy: 0.7983 - 1s/epoch - 4ms/step Epoch 98/300 283/283 - 1s - loss: 0.5129 - accuracy: 0.8286 - val_loss: 0.7916 - val_accuracy: 0.8013 - 1s/epoch - 4ms/step Epoch 99/300 283/283 - 1s - loss: 0.5228 - accuracy: 0.8299 - val_loss: 0.8376 - val_accuracy: 0.7893 - 1s/epoch - 4ms/step Epoch 100/300 283/283 - 1s - loss: 0.5041 - accuracy: 0.8322 - val_loss: 0.8607 - val_accuracy: 0.7827 - 1s/epoch - 4ms/step Epoch 101/300 283/283 - 1s - loss: 0.4844 - accuracy: 0.8389 - val_loss: 0.9106 - val_accuracy: 0.7853 - 1s/epoch - 4ms/step Epoch 102/300 283/283 - 1s - loss: 0.4888 - accuracy: 0.8367 - val_loss: 0.8914 - val_accuracy: 0.7853 - 1s/epoch - 4ms/step Epoch 103/300 283/283 - 1s - loss: 0.4973 - accuracy: 0.8365 - val_loss: 0.8903 - val_accuracy: 0.7917 - 1s/epoch - 4ms/step Epoch 104/300 283/283 - 1s - loss: 0.5103 - accuracy: 0.8323 - val_loss: 0.8535 - val_accuracy: 0.7963 - 1s/epoch - 4ms/step Epoch 105/300 283/283 - 1s - loss: 0.4848 - accuracy: 0.8391 - val_loss: 0.8781 - val_accuracy: 0.7920 - 1s/epoch - 4ms/step Epoch 106/300 283/283 - 1s - loss: 0.4705 - accuracy: 0.8487 - val_loss: 0.8640 - val_accuracy: 0.7953 - 1s/epoch - 4ms/step Epoch 107/300 283/283 - 1s - loss: 0.5084 - accuracy: 0.8404 - val_loss: 0.9201 - val_accuracy: 0.7760 - 1s/epoch - 4ms/step Epoch 108/300 283/283 - 1s - loss: 0.4837 - accuracy: 0.8496 - val_loss: 0.8853 - val_accuracy: 0.7833 - 1s/epoch - 4ms/step Epoch 109/300 283/283 - 1s - loss: 0.4976 - accuracy: 0.8386 - val_loss: 0.8580 - val_accuracy: 0.7910 - 1s/epoch - 4ms/step Epoch 110/300 283/283 - 1s - loss: 0.4864 - accuracy: 0.8432 - val_loss: 0.9199 - val_accuracy: 0.7877 - 1s/epoch - 4ms/step Epoch 111/300 283/283 - 1s - loss: 0.4934 - accuracy: 0.8438 - val_loss: 1.0693 - val_accuracy: 0.7523 - 1s/epoch - 4ms/step Epoch 112/300 283/283 - 1s - loss: 0.4454 - accuracy: 0.8572 - val_loss: 0.9064 - val_accuracy: 0.7833 - 1s/epoch - 4ms/step Epoch 113/300 283/283 - 1s - loss: 0.4688 - accuracy: 0.8505 - val_loss: 0.8808 - val_accuracy: 0.7930 - 1s/epoch - 4ms/step Epoch 114/300 283/283 - 1s - loss: 0.4452 - accuracy: 0.8525 - val_loss: 0.9373 - val_accuracy: 0.7793 - 1s/epoch - 4ms/step Epoch 115/300 283/283 - 1s - loss: 0.4457 - accuracy: 
0.8560 - val_loss: 1.1024 - val_accuracy: 0.7573 - 1s/epoch - 4ms/step Epoch 116/300 283/283 - 1s - loss: 0.4901 - accuracy: 0.8419 - val_loss: 0.8895 - val_accuracy: 0.7970 - 1s/epoch - 4ms/step Epoch 117/300 283/283 - 1s - loss: 0.4454 - accuracy: 0.8530 - val_loss: 0.8834 - val_accuracy: 0.7977 - 1s/epoch - 4ms/step Epoch 118/300 283/283 - 1s - loss: 0.4604 - accuracy: 0.8546 - val_loss: 0.8400 - val_accuracy: 0.8040 - 1s/epoch - 4ms/step Epoch 119/300 283/283 - 1s - loss: 0.4638 - accuracy: 0.8530 - val_loss: 0.9483 - val_accuracy: 0.7737 - 1s/epoch - 4ms/step Epoch 120/300 283/283 - 1s - loss: 0.4327 - accuracy: 0.8611 - val_loss: 0.9507 - val_accuracy: 0.7943 - 1s/epoch - 4ms/step Epoch 121/300 283/283 - 1s - loss: 0.4526 - accuracy: 0.8551 - val_loss: 0.9027 - val_accuracy: 0.7893 - 1s/epoch - 4ms/step Epoch 122/300 283/283 - 1s - loss: 0.4547 - accuracy: 0.8558 - val_loss: 0.9184 - val_accuracy: 0.7920 - 1s/epoch - 4ms/step Epoch 123/300 283/283 - 1s - loss: 0.4087 - accuracy: 0.8687 - val_loss: 0.9014 - val_accuracy: 0.7960 - 1s/epoch - 4ms/step Epoch 124/300 283/283 - 1s - loss: 0.4014 - accuracy: 0.8714 - val_loss: 0.8588 - val_accuracy: 0.8123 - 1s/epoch - 4ms/step Epoch 125/300 283/283 - 1s - loss: 0.4301 - accuracy: 0.8651 - val_loss: 1.0037 - val_accuracy: 0.7727 - 1s/epoch - 4ms/step Epoch 126/300 283/283 - 1s - loss: 0.4038 - accuracy: 0.8710 - val_loss: 0.9286 - val_accuracy: 0.8093 - 1s/epoch - 5ms/step Epoch 127/300 283/283 - 1s - loss: 0.4201 - accuracy: 0.8658 - val_loss: 1.0987 - val_accuracy: 0.7720 - 1s/epoch - 4ms/step Epoch 128/300 283/283 - 1s - loss: 0.4297 - accuracy: 0.8658 - val_loss: 0.9787 - val_accuracy: 0.7827 - 1s/epoch - 4ms/step Epoch 129/300 283/283 - 1s - loss: 0.4491 - accuracy: 0.8536 - val_loss: 0.9213 - val_accuracy: 0.7977 - 1s/epoch - 4ms/step Epoch 130/300 283/283 - 1s - loss: 0.4203 - accuracy: 0.8628 - val_loss: 1.0757 - val_accuracy: 0.7643 - 1s/epoch - 4ms/step Epoch 131/300 283/283 - 1s - loss: 0.4037 - accuracy: 0.8706 - val_loss: 0.9914 - val_accuracy: 0.7960 - 1s/epoch - 4ms/step Epoch 132/300 283/283 - 1s - loss: 0.4131 - accuracy: 0.8656 - val_loss: 0.9210 - val_accuracy: 0.7977 - 1s/epoch - 4ms/step Epoch 133/300 283/283 - 1s - loss: 0.4093 - accuracy: 0.8693 - val_loss: 0.9749 - val_accuracy: 0.7950 - 1s/epoch - 4ms/step Epoch 134/300 283/283 - 1s - loss: 0.3880 - accuracy: 0.8755 - val_loss: 0.9718 - val_accuracy: 0.7960 - 1s/epoch - 4ms/step Epoch 135/300 283/283 - 1s - loss: 0.3914 - accuracy: 0.8773 - val_loss: 0.9609 - val_accuracy: 0.8033 - 1s/epoch - 4ms/step Epoch 136/300 283/283 - 1s - loss: 0.3910 - accuracy: 0.8785 - val_loss: 1.1285 - val_accuracy: 0.7783 - 1s/epoch - 4ms/step Epoch 137/300 283/283 - 1s - loss: 0.4299 - accuracy: 0.8670 - val_loss: 0.9187 - val_accuracy: 0.7970 - 1s/epoch - 4ms/step Epoch 138/300 283/283 - 1s - loss: 0.4047 - accuracy: 0.8714 - val_loss: 0.9399 - val_accuracy: 0.8090 - 1s/epoch - 4ms/step Epoch 139/300 283/283 - 1s - loss: 0.3826 - accuracy: 0.8784 - val_loss: 1.0083 - val_accuracy: 0.7790 - 1s/epoch - 4ms/step Epoch 140/300 283/283 - 1s - loss: 0.3941 - accuracy: 0.8765 - val_loss: 0.9963 - val_accuracy: 0.7887 - 1s/epoch - 4ms/step Epoch 141/300 283/283 - 1s - loss: 0.3913 - accuracy: 0.8775 - val_loss: 1.0020 - val_accuracy: 0.7817 - 1s/epoch - 4ms/step Epoch 142/300 283/283 - 1s - loss: 0.3683 - accuracy: 0.8806 - val_loss: 1.0068 - val_accuracy: 0.8013 - 1s/epoch - 4ms/step Epoch 143/300 283/283 - 1s - loss: 0.3931 - accuracy: 0.8790 - val_loss: 0.9503 - val_accuracy: 0.7853 - 
1s/epoch - 4ms/step Epoch 144/300 283/283 - 1s - loss: 0.3928 - accuracy: 0.8775 - val_loss: 1.0095 - val_accuracy: 0.7867 - 1s/epoch - 4ms/step Epoch 145/300 283/283 - 1s - loss: 0.3958 - accuracy: 0.8735 - val_loss: 0.9686 - val_accuracy: 0.7960 - 1s/epoch - 4ms/step Epoch 146/300 283/283 - 1s - loss: 0.3738 - accuracy: 0.8858 - val_loss: 1.1791 - val_accuracy: 0.7733 - 1s/epoch - 4ms/step Epoch 147/300 283/283 - 1s - loss: 0.3466 - accuracy: 0.8880 - val_loss: 1.0815 - val_accuracy: 0.7920 - 1s/epoch - 4ms/step Epoch 148/300 283/283 - 1s - loss: 0.3727 - accuracy: 0.8825 - val_loss: 1.0038 - val_accuracy: 0.8000 - 1s/epoch - 4ms/step Epoch 149/300 283/283 - 1s - loss: 0.3578 - accuracy: 0.8854 - val_loss: 1.0735 - val_accuracy: 0.7890 - 1s/epoch - 4ms/step Epoch 150/300 283/283 - 1s - loss: 0.3548 - accuracy: 0.8898 - val_loss: 1.0359 - val_accuracy: 0.7933 - 1s/epoch - 4ms/step Epoch 151/300 283/283 - 1s - loss: 0.3730 - accuracy: 0.8862 - val_loss: 0.9935 - val_accuracy: 0.7950 - 1s/epoch - 4ms/step Epoch 152/300 283/283 - 1s - loss: 0.3605 - accuracy: 0.8857 - val_loss: 1.0255 - val_accuracy: 0.7997 - 1s/epoch - 4ms/step Epoch 153/300 283/283 - 1s - loss: 0.3699 - accuracy: 0.8824 - val_loss: 1.0190 - val_accuracy: 0.7917 - 1s/epoch - 4ms/step Epoch 154/300 283/283 - 1s - loss: 0.3771 - accuracy: 0.8827 - val_loss: 0.9451 - val_accuracy: 0.7913 - 1s/epoch - 4ms/step Epoch 155/300 283/283 - 1s - loss: 0.3611 - accuracy: 0.8848 - val_loss: 0.9575 - val_accuracy: 0.7990 - 1s/epoch - 4ms/step Epoch 156/300 283/283 - 1s - loss: 0.3613 - accuracy: 0.8850 - val_loss: 1.0040 - val_accuracy: 0.7993 - 1s/epoch - 4ms/step Epoch 157/300 283/283 - 1s - loss: 0.3524 - accuracy: 0.8882 - val_loss: 1.0366 - val_accuracy: 0.7967 - 1s/epoch - 4ms/step Epoch 158/300 283/283 - 1s - loss: 0.3660 - accuracy: 0.8880 - val_loss: 0.9877 - val_accuracy: 0.8057 - 1s/epoch - 4ms/step Epoch 159/300 283/283 - 1s - loss: 0.3403 - accuracy: 0.8942 - val_loss: 1.0786 - val_accuracy: 0.7927 - 1s/epoch - 4ms/step Epoch 160/300 283/283 - 1s - loss: 0.4417 - accuracy: 0.8686 - val_loss: 1.1056 - val_accuracy: 0.7697 - 1s/epoch - 4ms/step Epoch 161/300 283/283 - 1s - loss: 0.3418 - accuracy: 0.8932 - val_loss: 1.0011 - val_accuracy: 0.7970 - 1s/epoch - 4ms/step Epoch 162/300 283/283 - 1s - loss: 0.3371 - accuracy: 0.8958 - val_loss: 1.0302 - val_accuracy: 0.7933 - 1s/epoch - 5ms/step Epoch 163/300 283/283 - 1s - loss: 0.3452 - accuracy: 0.8930 - val_loss: 1.0098 - val_accuracy: 0.8047 - 1s/epoch - 4ms/step Epoch 164/300 283/283 - 1s - loss: 0.3253 - accuracy: 0.9006 - val_loss: 1.0711 - val_accuracy: 0.7923 - 1s/epoch - 4ms/step Epoch 165/300 283/283 - 1s - loss: 0.3316 - accuracy: 0.8967 - val_loss: 0.9604 - val_accuracy: 0.7993 - 1s/epoch - 4ms/step Epoch 166/300 283/283 - 1s - loss: 0.3338 - accuracy: 0.8926 - val_loss: 1.1320 - val_accuracy: 0.7917 - 1s/epoch - 4ms/step Epoch 167/300 283/283 - 1s - loss: 0.4011 - accuracy: 0.8788 - val_loss: 1.0862 - val_accuracy: 0.7840 - 1s/epoch - 4ms/step Epoch 168/300 283/283 - 1s - loss: 0.3426 - accuracy: 0.8922 - val_loss: 1.1471 - val_accuracy: 0.7860 - 1s/epoch - 4ms/step Epoch 169/300 283/283 - 1s - loss: 0.3243 - accuracy: 0.8973 - val_loss: 1.0822 - val_accuracy: 0.7993 - 1s/epoch - 4ms/step Epoch 170/300 283/283 - 1s - loss: 0.3111 - accuracy: 0.9025 - val_loss: 1.0963 - val_accuracy: 0.7937 - 1s/epoch - 4ms/step Epoch 171/300 283/283 - 1s - loss: 0.3660 - accuracy: 0.8885 - val_loss: 1.0567 - val_accuracy: 0.7997 - 1s/epoch - 4ms/step Epoch 172/300 283/283 - 1s - loss: 
0.3162 - accuracy: 0.9055 - val_loss: 1.1083 - val_accuracy: 0.7973 - 1s/epoch - 4ms/step Epoch 173/300 283/283 - 1s - loss: 0.3170 - accuracy: 0.9026 - val_loss: 1.0414 - val_accuracy: 0.7927 - 1s/epoch - 4ms/step Epoch 174/300 283/283 - 1s - loss: 0.3232 - accuracy: 0.9047 - val_loss: 1.2663 - val_accuracy: 0.7780 - 1s/epoch - 4ms/step Epoch 175/300 283/283 - 1s - loss: 0.3357 - accuracy: 0.8982 - val_loss: 1.1731 - val_accuracy: 0.7893 - 1s/epoch - 4ms/step Epoch 176/300 283/283 - 1s - loss: 0.3288 - accuracy: 0.8999 - val_loss: 1.1959 - val_accuracy: 0.7807 - 1s/epoch - 4ms/step Epoch 177/300 283/283 - 1s - loss: 0.3205 - accuracy: 0.8994 - val_loss: 1.0934 - val_accuracy: 0.8017 - 1s/epoch - 5ms/step Epoch 178/300 283/283 - 1s - loss: 0.3054 - accuracy: 0.9074 - val_loss: 1.0826 - val_accuracy: 0.7997 - 1s/epoch - 4ms/step Epoch 179/300 283/283 - 1s - loss: 0.3229 - accuracy: 0.9014 - val_loss: 1.0334 - val_accuracy: 0.7957 - 1s/epoch - 4ms/step Epoch 180/300 283/283 - 1s - loss: 0.3258 - accuracy: 0.8993 - val_loss: 1.0944 - val_accuracy: 0.8027 - 1s/epoch - 4ms/step Epoch 181/300 283/283 - 1s - loss: 0.2942 - accuracy: 0.9129 - val_loss: 1.1599 - val_accuracy: 0.8007 - 1s/epoch - 4ms/step Epoch 182/300 283/283 - 1s - loss: 0.3023 - accuracy: 0.9108 - val_loss: 1.1662 - val_accuracy: 0.7860 - 1s/epoch - 4ms/step Epoch 183/300 283/283 - 1s - loss: 0.3160 - accuracy: 0.9056 - val_loss: 1.1346 - val_accuracy: 0.7910 - 1s/epoch - 4ms/step Epoch 184/300 283/283 - 1s - loss: 0.2944 - accuracy: 0.9128 - val_loss: 1.0517 - val_accuracy: 0.8073 - 1s/epoch - 4ms/step Epoch 185/300 283/283 - 1s - loss: 0.3179 - accuracy: 0.9041 - val_loss: 1.2973 - val_accuracy: 0.7793 - 1s/epoch - 4ms/step Epoch 186/300 283/283 - 1s - loss: 0.3523 - accuracy: 0.8983 - val_loss: 1.1796 - val_accuracy: 0.7930 - 1s/epoch - 4ms/step Epoch 187/300 283/283 - 1s - loss: 0.2953 - accuracy: 0.9104 - val_loss: 1.1777 - val_accuracy: 0.8010 - 1s/epoch - 4ms/step Epoch 188/300 283/283 - 1s - loss: 0.3194 - accuracy: 0.9010 - val_loss: 1.1365 - val_accuracy: 0.7947 - 1s/epoch - 4ms/step Epoch 189/300 283/283 - 1s - loss: 0.3367 - accuracy: 0.8999 - val_loss: 1.1697 - val_accuracy: 0.7900 - 1s/epoch - 4ms/step Epoch 190/300 283/283 - 1s - loss: 0.2833 - accuracy: 0.9145 - val_loss: 1.1307 - val_accuracy: 0.8010 - 1s/epoch - 4ms/step Epoch 191/300 283/283 - 1s - loss: 0.2904 - accuracy: 0.9137 - val_loss: 1.2340 - val_accuracy: 0.7810 - 1s/epoch - 4ms/step Epoch 192/300 283/283 - 1s - loss: 0.3178 - accuracy: 0.9043 - val_loss: 1.1162 - val_accuracy: 0.7947 - 1s/epoch - 4ms/step Epoch 193/300 283/283 - 1s - loss: 0.2861 - accuracy: 0.9157 - val_loss: 1.1517 - val_accuracy: 0.7983 - 1s/epoch - 4ms/step Epoch 194/300 283/283 - 1s - loss: 0.2755 - accuracy: 0.9148 - val_loss: 1.3031 - val_accuracy: 0.7923 - 1s/epoch - 4ms/step Epoch 195/300 283/283 - 1s - loss: 0.2928 - accuracy: 0.9101 - val_loss: 1.0559 - val_accuracy: 0.7983 - 1s/epoch - 4ms/step Epoch 196/300 283/283 - 1s - loss: 0.3021 - accuracy: 0.9080 - val_loss: 1.1484 - val_accuracy: 0.7800 - 1s/epoch - 4ms/step Epoch 197/300 283/283 - 1s - loss: 0.3093 - accuracy: 0.9046 - val_loss: 1.1288 - val_accuracy: 0.8010 - 1s/epoch - 4ms/step Epoch 198/300 283/283 - 1s - loss: 0.2918 - accuracy: 0.9074 - val_loss: 1.4267 - val_accuracy: 0.7693 - 1s/epoch - 4ms/step Epoch 199/300 283/283 - 1s - loss: 0.2741 - accuracy: 0.9188 - val_loss: 1.1940 - val_accuracy: 0.7983 - 1s/epoch - 4ms/step Epoch 200/300 283/283 - 1s - loss: 0.2831 - accuracy: 0.9152 - val_loss: 1.1935 - 
val_accuracy: 0.7963 - 1s/epoch - 4ms/step Epoch 201/300 283/283 - 1s - loss: 0.2805 - accuracy: 0.9175 - val_loss: 1.2605 - val_accuracy: 0.7927 - 1s/epoch - 4ms/step Epoch 202/300 283/283 - 1s - loss: 0.2932 - accuracy: 0.9116 - val_loss: 1.3942 - val_accuracy: 0.7763 - 1s/epoch - 4ms/step Epoch 203/300 283/283 - 1s - loss: 0.3472 - accuracy: 0.8920 - val_loss: 1.1404 - val_accuracy: 0.7960 - 1s/epoch - 4ms/step Epoch 204/300 283/283 - 1s - loss: 0.2848 - accuracy: 0.9163 - val_loss: 1.1130 - val_accuracy: 0.7947 - 1s/epoch - 4ms/step Epoch 205/300 283/283 - 1s - loss: 0.2672 - accuracy: 0.9189 - val_loss: 1.1109 - val_accuracy: 0.8010 - 1s/epoch - 4ms/step Epoch 206/300 283/283 - 1s - loss: 0.2822 - accuracy: 0.9179 - val_loss: 1.1096 - val_accuracy: 0.7923 - 1s/epoch - 4ms/step Epoch 207/300 283/283 - 1s - loss: 0.2645 - accuracy: 0.9209 - val_loss: 1.4944 - val_accuracy: 0.7627 - 1s/epoch - 4ms/step Epoch 208/300 283/283 - 1s - loss: 0.2866 - accuracy: 0.9122 - val_loss: 1.2218 - val_accuracy: 0.7950 - 1s/epoch - 4ms/step Epoch 209/300 283/283 - 1s - loss: 0.2927 - accuracy: 0.9145 - val_loss: 1.0955 - val_accuracy: 0.7960 - 1s/epoch - 4ms/step Epoch 210/300 283/283 - 1s - loss: 0.2850 - accuracy: 0.9148 - val_loss: 1.1840 - val_accuracy: 0.7980 - 1s/epoch - 4ms/step Epoch 211/300 283/283 - 1s - loss: 0.2934 - accuracy: 0.9166 - val_loss: 1.1442 - val_accuracy: 0.7987 - 1s/epoch - 4ms/step Epoch 212/300 283/283 - 1s - loss: 0.2742 - accuracy: 0.9200 - val_loss: 1.4180 - val_accuracy: 0.7630 - 1s/epoch - 4ms/step Epoch 213/300 283/283 - 1s - loss: 0.2571 - accuracy: 0.9251 - val_loss: 1.1914 - val_accuracy: 0.7983 - 1s/epoch - 4ms/step Epoch 214/300 283/283 - 1s - loss: 0.2800 - accuracy: 0.9174 - val_loss: 1.2203 - val_accuracy: 0.7843 - 1s/epoch - 4ms/step Epoch 215/300 283/283 - 1s - loss: 0.2533 - accuracy: 0.9248 - val_loss: 1.2254 - val_accuracy: 0.7977 - 1s/epoch - 4ms/step Epoch 216/300 283/283 - 1s - loss: 0.2618 - accuracy: 0.9220 - val_loss: 1.4186 - val_accuracy: 0.7660 - 1s/epoch - 4ms/step Epoch 217/300 283/283 - 1s - loss: 0.2873 - accuracy: 0.9128 - val_loss: 1.2321 - val_accuracy: 0.7930 - 1s/epoch - 4ms/step Epoch 218/300 283/283 - 1s - loss: 0.2602 - accuracy: 0.9248 - val_loss: 1.3534 - val_accuracy: 0.7793 - 1s/epoch - 4ms/step Epoch 219/300 283/283 - 1s - loss: 0.2621 - accuracy: 0.9209 - val_loss: 1.4190 - val_accuracy: 0.7760 - 1s/epoch - 4ms/step Epoch 220/300 283/283 - 1s - loss: 0.2881 - accuracy: 0.9171 - val_loss: 1.3922 - val_accuracy: 0.7697 - 1s/epoch - 4ms/step Epoch 221/300 283/283 - 1s - loss: 0.2739 - accuracy: 0.9187 - val_loss: 1.4558 - val_accuracy: 0.7697 - 1s/epoch - 4ms/step Epoch 222/300 283/283 - 1s - loss: 0.2796 - accuracy: 0.9138 - val_loss: 1.2145 - val_accuracy: 0.7987 - 1s/epoch - 4ms/step Epoch 223/300 283/283 - 1s - loss: 0.2755 - accuracy: 0.9173 - val_loss: 1.1463 - val_accuracy: 0.7897 - 1s/epoch - 4ms/step Epoch 224/300 283/283 - 1s - loss: 0.2701 - accuracy: 0.9210 - val_loss: 1.1013 - val_accuracy: 0.7873 - 1s/epoch - 4ms/step Epoch 225/300 283/283 - 1s - loss: 0.2607 - accuracy: 0.9242 - val_loss: 1.2183 - val_accuracy: 0.7980 - 1s/epoch - 4ms/step Epoch 226/300 283/283 - 1s - loss: 0.2716 - accuracy: 0.9209 - val_loss: 1.2247 - val_accuracy: 0.7943 - 1s/epoch - 4ms/step Epoch 227/300 283/283 - 1s - loss: 0.2756 - accuracy: 0.9202 - val_loss: 1.2362 - val_accuracy: 0.7970 - 1s/epoch - 5ms/step Epoch 228/300 283/283 - 1s - loss: 0.2479 - accuracy: 0.9301 - val_loss: 1.4131 - val_accuracy: 0.7750 - 1s/epoch - 4ms/step Epoch 
229/300 283/283 - 1s - loss: 0.2931 - accuracy: 0.9159 - val_loss: 1.1607 - val_accuracy: 0.8013 - 1s/epoch - 4ms/step Epoch 230/300 283/283 - 1s - loss: 0.2356 - accuracy: 0.9301 - val_loss: 1.5370 - val_accuracy: 0.7623 - 1s/epoch - 4ms/step Epoch 231/300 283/283 - 1s - loss: 0.2699 - accuracy: 0.9212 - val_loss: 1.2875 - val_accuracy: 0.7833 - 1s/epoch - 4ms/step Epoch 232/300 283/283 - 1s - loss: 0.2511 - accuracy: 0.9272 - val_loss: 1.3482 - val_accuracy: 0.7693 - 1s/epoch - 4ms/step Epoch 233/300 283/283 - 1s - loss: 0.2641 - accuracy: 0.9217 - val_loss: 1.2619 - val_accuracy: 0.7863 - 1s/epoch - 4ms/step Epoch 234/300 283/283 - 1s - loss: 0.2824 - accuracy: 0.9199 - val_loss: 1.4691 - val_accuracy: 0.7563 - 1s/epoch - 4ms/step Epoch 235/300 283/283 - 1s - loss: 0.2542 - accuracy: 0.9273 - val_loss: 1.2564 - val_accuracy: 0.7880 - 1s/epoch - 4ms/step Epoch 236/300 283/283 - 1s - loss: 0.2244 - accuracy: 0.9341 - val_loss: 1.3651 - val_accuracy: 0.7910 - 1s/epoch - 4ms/step Epoch 237/300 283/283 - 1s - loss: 0.2514 - accuracy: 0.9250 - val_loss: 1.2797 - val_accuracy: 0.7860 - 1s/epoch - 4ms/step Epoch 238/300 283/283 - 1s - loss: 0.2514 - accuracy: 0.9265 - val_loss: 1.4776 - val_accuracy: 0.7763 - 1s/epoch - 4ms/step Epoch 239/300 283/283 - 1s - loss: 0.2608 - accuracy: 0.9271 - val_loss: 1.3656 - val_accuracy: 0.7837 - 1s/epoch - 4ms/step Epoch 240/300 283/283 - 1s - loss: 0.2616 - accuracy: 0.9259 - val_loss: 1.2925 - val_accuracy: 0.7877 - 1s/epoch - 4ms/step Epoch 241/300 283/283 - 1s - loss: 0.2922 - accuracy: 0.9136 - val_loss: 1.3443 - val_accuracy: 0.7933 - 1s/epoch - 4ms/step Epoch 242/300 283/283 - 1s - loss: 0.2366 - accuracy: 0.9292 - val_loss: 1.3947 - val_accuracy: 0.7853 - 1s/epoch - 4ms/step Epoch 243/300 283/283 - 1s - loss: 0.2341 - accuracy: 0.9338 - val_loss: 1.2420 - val_accuracy: 0.7903 - 1s/epoch - 4ms/step Epoch 244/300 283/283 - 1s - loss: 0.2340 - accuracy: 0.9355 - val_loss: 1.2221 - val_accuracy: 0.7947 - 1s/epoch - 4ms/step Epoch 245/300 283/283 - 1s - loss: 0.2300 - accuracy: 0.9300 - val_loss: 1.3198 - val_accuracy: 0.7917 - 1s/epoch - 4ms/step Epoch 246/300 283/283 - 1s - loss: 0.2281 - accuracy: 0.9356 - val_loss: 1.3303 - val_accuracy: 0.7787 - 1s/epoch - 4ms/step Epoch 247/300 283/283 - 1s - loss: 0.2820 - accuracy: 0.9210 - val_loss: 1.5014 - val_accuracy: 0.7430 - 1s/epoch - 4ms/step Epoch 248/300 283/283 - 1s - loss: 0.3142 - accuracy: 0.9096 - val_loss: 1.2643 - val_accuracy: 0.7890 - 1s/epoch - 4ms/step Epoch 249/300 283/283 - 1s - loss: 0.2232 - accuracy: 0.9366 - val_loss: 1.3945 - val_accuracy: 0.7850 - 1s/epoch - 4ms/step Epoch 250/300 283/283 - 1s - loss: 0.2417 - accuracy: 0.9317 - val_loss: 1.1633 - val_accuracy: 0.8087 - 1s/epoch - 4ms/step Epoch 251/300 283/283 - 1s - loss: 0.2471 - accuracy: 0.9266 - val_loss: 1.3094 - val_accuracy: 0.7933 - 1s/epoch - 4ms/step Epoch 252/300 283/283 - 1s - loss: 0.2481 - accuracy: 0.9301 - val_loss: 1.2007 - val_accuracy: 0.7913 - 1s/epoch - 4ms/step Epoch 253/300 283/283 - 1s - loss: 0.2323 - accuracy: 0.9321 - val_loss: 1.4629 - val_accuracy: 0.7833 - 1s/epoch - 4ms/step Epoch 254/300 283/283 - 1s - loss: 0.2387 - accuracy: 0.9301 - val_loss: 1.1606 - val_accuracy: 0.7973 - 1s/epoch - 4ms/step Epoch 255/300 283/283 - 1s - loss: 0.2372 - accuracy: 0.9309 - val_loss: 1.3615 - val_accuracy: 0.7903 - 1s/epoch - 4ms/step Epoch 256/300 283/283 - 1s - loss: 0.2327 - accuracy: 0.9339 - val_loss: 1.4382 - val_accuracy: 0.7817 - 1s/epoch - 4ms/step Epoch 257/300 283/283 - 1s - loss: 0.2281 - accuracy: 0.9363 
- val_loss: 1.3481 - val_accuracy: 0.7843 - 1s/epoch - 4ms/step Epoch 258/300 283/283 - 1s - loss: 0.2380 - accuracy: 0.9359 - val_loss: 1.4001 - val_accuracy: 0.7777 - 1s/epoch - 4ms/step Epoch 259/300 283/283 - 1s - loss: 0.2575 - accuracy: 0.9265 - val_loss: 1.3470 - val_accuracy: 0.7790 - 1s/epoch - 4ms/step Epoch 260/300 283/283 - 1s - loss: 0.2250 - accuracy: 0.9340 - val_loss: 1.2973 - val_accuracy: 0.8003 - 1s/epoch - 4ms/step Epoch 261/300 283/283 - 1s - loss: 0.2142 - accuracy: 0.9401 - val_loss: 1.5075 - val_accuracy: 0.7830 - 1s/epoch - 4ms/step Epoch 262/300 283/283 - 1s - loss: 0.2269 - accuracy: 0.9362 - val_loss: 1.2600 - val_accuracy: 0.8020 - 1s/epoch - 4ms/step Epoch 263/300 283/283 - 1s - loss: 0.2014 - accuracy: 0.9430 - val_loss: 1.3622 - val_accuracy: 0.7970 - 1s/epoch - 4ms/step Epoch 264/300 283/283 - 1s - loss: 0.2488 - accuracy: 0.9319 - val_loss: 1.3931 - val_accuracy: 0.7653 - 1s/epoch - 4ms/step Epoch 265/300 283/283 - 1s - loss: 0.3065 - accuracy: 0.9127 - val_loss: 1.4978 - val_accuracy: 0.7777 - 1s/epoch - 4ms/step Epoch 266/300 283/283 - 1s - loss: 0.2399 - accuracy: 0.9312 - val_loss: 1.3711 - val_accuracy: 0.7913 - 1s/epoch - 4ms/step Epoch 267/300 283/283 - 1s - loss: 0.2254 - accuracy: 0.9384 - val_loss: 1.6844 - val_accuracy: 0.7587 - 1s/epoch - 4ms/step Epoch 268/300 283/283 - 1s - loss: 0.2348 - accuracy: 0.9334 - val_loss: 1.4405 - val_accuracy: 0.7907 - 1s/epoch - 4ms/step Epoch 269/300 283/283 - 1s - loss: 0.2065 - accuracy: 0.9448 - val_loss: 1.4914 - val_accuracy: 0.7837 - 1s/epoch - 4ms/step Epoch 270/300 283/283 - 1s - loss: 0.2048 - accuracy: 0.9427 - val_loss: 1.4739 - val_accuracy: 0.7953 - 1s/epoch - 4ms/step Epoch 271/300 283/283 - 1s - loss: 0.2261 - accuracy: 0.9366 - val_loss: 1.4983 - val_accuracy: 0.7660 - 1s/epoch - 4ms/step Epoch 272/300 283/283 - 1s - loss: 0.2489 - accuracy: 0.9255 - val_loss: 1.3971 - val_accuracy: 0.7940 - 1s/epoch - 4ms/step Epoch 273/300 283/283 - 1s - loss: 0.2570 - accuracy: 0.9294 - val_loss: 1.3070 - val_accuracy: 0.7837 - 1s/epoch - 4ms/step Epoch 274/300 283/283 - 1s - loss: 0.2715 - accuracy: 0.9232 - val_loss: 1.5456 - val_accuracy: 0.7627 - 1s/epoch - 4ms/step Epoch 275/300 283/283 - 1s - loss: 0.2110 - accuracy: 0.9421 - val_loss: 1.3800 - val_accuracy: 0.7927 - 1s/epoch - 4ms/step Epoch 276/300 283/283 - 1s - loss: 0.2049 - accuracy: 0.9472 - val_loss: 1.4535 - val_accuracy: 0.7913 - 1s/epoch - 4ms/step Epoch 277/300 283/283 - 1s - loss: 0.2410 - accuracy: 0.9314 - val_loss: 1.2585 - val_accuracy: 0.7903 - 1s/epoch - 4ms/step Epoch 278/300 283/283 - 1s - loss: 0.2122 - accuracy: 0.9405 - val_loss: 1.4418 - val_accuracy: 0.7923 - 1s/epoch - 4ms/step Epoch 279/300 283/283 - 1s - loss: 0.2168 - accuracy: 0.9415 - val_loss: 1.4003 - val_accuracy: 0.8003 - 1s/epoch - 4ms/step Epoch 280/300 283/283 - 1s - loss: 0.2182 - accuracy: 0.9393 - val_loss: 1.4800 - val_accuracy: 0.7913 - 1s/epoch - 5ms/step Epoch 281/300 283/283 - 1s - loss: 0.2121 - accuracy: 0.9425 - val_loss: 1.6331 - val_accuracy: 0.7670 - 1s/epoch - 4ms/step Epoch 282/300 283/283 - 1s - loss: 0.2393 - accuracy: 0.9361 - val_loss: 1.3554 - val_accuracy: 0.7807 - 1s/epoch - 4ms/step Epoch 283/300 283/283 - 1s - loss: 0.2375 - accuracy: 0.9348 - val_loss: 1.2665 - val_accuracy: 0.7997 - 1s/epoch - 4ms/step Epoch 284/300 283/283 - 1s - loss: 0.2241 - accuracy: 0.9376 - val_loss: 1.3791 - val_accuracy: 0.7880 - 1s/epoch - 4ms/step Epoch 285/300 283/283 - 1s - loss: 0.2161 - accuracy: 0.9377 - val_loss: 1.4283 - val_accuracy: 0.7860 - 1s/epoch - 
4ms/step Epoch 286/300 283/283 - 1s - loss: 0.2070 - accuracy: 0.9424 - val_loss: 1.3078 - val_accuracy: 0.7933 - 1s/epoch - 4ms/step Epoch 287/300 283/283 - 1s - loss: 0.2293 - accuracy: 0.9372 - val_loss: 1.4155 - val_accuracy: 0.7837 - 1s/epoch - 4ms/step Epoch 288/300 283/283 - 1s - loss: 0.2143 - accuracy: 0.9402 - val_loss: 1.5785 - val_accuracy: 0.7907 - 1s/epoch - 4ms/step Epoch 289/300 283/283 - 1s - loss: 0.2111 - accuracy: 0.9406 - val_loss: 1.4920 - val_accuracy: 0.7820 - 1s/epoch - 4ms/step Epoch 290/300 283/283 - 1s - loss: 0.2112 - accuracy: 0.9389 - val_loss: 1.4132 - val_accuracy: 0.7863 - 1s/epoch - 4ms/step Epoch 291/300 283/283 - 1s - loss: 0.2288 - accuracy: 0.9389 - val_loss: 1.4595 - val_accuracy: 0.7853 - 1s/epoch - 4ms/step Epoch 292/300 283/283 - 1s - loss: 0.2671 - accuracy: 0.9260 - val_loss: 1.5033 - val_accuracy: 0.7873 - 1s/epoch - 4ms/step Epoch 293/300 283/283 - 1s - loss: 0.1844 - accuracy: 0.9496 - val_loss: 1.5035 - val_accuracy: 0.7830 - 1s/epoch - 4ms/step Epoch 294/300 283/283 - 1s - loss: 0.2185 - accuracy: 0.9394 - val_loss: 1.5891 - val_accuracy: 0.7500 - 1s/epoch - 4ms/step Epoch 295/300 283/283 - 1s - loss: 0.2321 - accuracy: 0.9360 - val_loss: 1.4772 - val_accuracy: 0.7820 - 1s/epoch - 4ms/step Epoch 296/300 283/283 - 1s - loss: 0.2049 - accuracy: 0.9447 - val_loss: 1.5046 - val_accuracy: 0.7887 - 1s/epoch - 4ms/step Epoch 297/300 283/283 - 1s - loss: 0.2021 - accuracy: 0.9463 - val_loss: 1.5621 - val_accuracy: 0.7780 - 1s/epoch - 4ms/step Epoch 298/300 283/283 - 1s - loss: 0.2016 - accuracy: 0.9468 - val_loss: 1.4417 - val_accuracy: 0.7997 - 1s/epoch - 4ms/step Epoch 299/300 283/283 - 1s - loss: 0.2527 - accuracy: 0.9318 - val_loss: 1.4879 - val_accuracy: 0.7970 - 1s/epoch - 4ms/step Epoch 300/300 283/283 - 1s - loss: 0.1939 - accuracy: 0.9465 - val_loss: 1.4999 - val_accuracy: 0.7990 - 1s/epoch - 4ms/step 94/94 - 0s - loss: 1.4999 - accuracy: 0.7990 - 201ms/epoch - 2ms/step Baseline Error: 20.10%
We can clearly see that the model stops learning and essentially plateaus past roughly the 100th epoch. It would therefore be wise either to lower the learning rate outright or to implement a learning rate scheduler in order to improve the model's performance.
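The first option, simply lowering Adam's fixed learning rate, would only require changing the compile step. A minimal sketch is shown here; the 1e-4 value is an assumption for illustration, not something tried in this notebook, which instead takes the second option and implements a step-decay scheduler below.
from keras.optimizers import Adam  # already imported above
model_31new.compile(optimizer=Adam(learning_rate=1e-4),  # lower fixed learning rate (assumed value)
                    loss='categorical_crossentropy',
                    metrics=['accuracy'])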
#try lr scheduler
def lr_schedule(epoch):
    initial_lr = 0.001  #default of adam
    decay_factor = 0.8
    epochs_drop = 10
    new_lr = initial_lr * (decay_factor ** (epoch // epochs_drop))
    return new_lr
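# Worked values of this step decay: with initial_lr=0.001, decay_factor=0.8 and epochs_drop=10,
# the schedule gives lr = 1.0e-03 for epochs 1-10, 8.0e-04 for 11-20, 6.4e-04 for 21-30,
# 5.12e-04 for 31-40, and so on, which matches the lr values reported in the training log below.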
lr_scheduler = LearningRateScheduler(lr_schedule)
model_31new = models.Sequential([
    layers.Conv2D(32, (3, 3), activation='relu', input_shape=(31, 31, 1)),
    layers.MaxPooling2D((2, 2)),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.MaxPooling2D((2, 2)),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.MaxPooling2D((2, 2)),
    layers.Flatten(),
    layers.Dense(64, activation='relu', kernel_regularizer=regularizers.l2(0.001)),
    layers.Dropout(0.5),
    layers.Dense(15, activation='softmax')
])
model_31new.compile(optimizer='adam',
                    loss='categorical_crossentropy',
                    metrics=['accuracy'])
history = model_31new.fit(X_train31, train_labels, validation_data=(X_val31, validation_labels),
                          epochs=300, batch_size=32, verbose=2, class_weight=class_weight,
                          callbacks=[lr_scheduler])
scores = model_31new.evaluate(X_val31, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
df31.loc[len(df31)] = ['LearningRateDecayModel', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
Epoch 1/300 283/283 - 3s - loss: 3.1262 - accuracy: 0.1203 - val_loss: 2.8285 - val_accuracy: 0.1013 - lr: 0.0010 - 3s/epoch - 11ms/step Epoch 2/300 283/283 - 1s - loss: 2.7520 - accuracy: 0.1954 - val_loss: 2.4456 - val_accuracy: 0.1913 - lr: 0.0010 - 1s/epoch - 4ms/step Epoch 3/300 283/283 - 1s - loss: 2.4816 - accuracy: 0.2521 - val_loss: 2.2994 - val_accuracy: 0.2377 - lr: 0.0010 - 1s/epoch - 4ms/step Epoch 4/300 283/283 - 1s - loss: 2.2671 - accuracy: 0.3042 - val_loss: 2.0560 - val_accuracy: 0.3273 - lr: 0.0010 - 1s/epoch - 4ms/step Epoch 5/300 283/283 - 1s - loss: 2.1091 - accuracy: 0.3388 - val_loss: 1.9396 - val_accuracy: 0.3613 - lr: 0.0010 - 1s/epoch - 4ms/step Epoch 6/300 283/283 - 1s - loss: 2.0190 - accuracy: 0.3658 - val_loss: 1.9670 - val_accuracy: 0.3650 - lr: 0.0010 - 1s/epoch - 4ms/step Epoch 7/300 283/283 - 1s - loss: 1.9097 - accuracy: 0.3961 - val_loss: 1.8025 - val_accuracy: 0.3957 - lr: 0.0010 - 1s/epoch - 4ms/step Epoch 8/300 283/283 - 1s - loss: 1.8434 - accuracy: 0.4201 - val_loss: 1.7814 - val_accuracy: 0.4053 - lr: 0.0010 - 1s/epoch - 4ms/step Epoch 9/300 283/283 - 1s - loss: 1.7674 - accuracy: 0.4372 - val_loss: 1.8203 - val_accuracy: 0.4170 - lr: 0.0010 - 1s/epoch - 4ms/step Epoch 10/300 283/283 - 1s - loss: 1.7073 - accuracy: 0.4608 - val_loss: 1.5570 - val_accuracy: 0.4907 - lr: 0.0010 - 1s/epoch - 4ms/step Epoch 11/300 283/283 - 1s - loss: 1.6102 - accuracy: 0.4760 - val_loss: 1.4891 - val_accuracy: 0.5227 - lr: 8.0000e-04 - 1s/epoch - 4ms/step Epoch 12/300 283/283 - 1s - loss: 1.5752 - accuracy: 0.4884 - val_loss: 1.4478 - val_accuracy: 0.5253 - lr: 8.0000e-04 - 1s/epoch - 4ms/step Epoch 13/300 283/283 - 1s - loss: 1.5160 - accuracy: 0.5122 - val_loss: 1.3675 - val_accuracy: 0.5613 - lr: 8.0000e-04 - 1s/epoch - 4ms/step Epoch 14/300 283/283 - 1s - loss: 1.5088 - accuracy: 0.5144 - val_loss: 1.4156 - val_accuracy: 0.5607 - lr: 8.0000e-04 - 1s/epoch - 4ms/step Epoch 15/300 283/283 - 1s - loss: 1.4611 - accuracy: 0.5308 - val_loss: 1.4035 - val_accuracy: 0.5413 - lr: 8.0000e-04 - 1s/epoch - 4ms/step Epoch 16/300 283/283 - 1s - loss: 1.4154 - accuracy: 0.5393 - val_loss: 1.3792 - val_accuracy: 0.5550 - lr: 8.0000e-04 - 1s/epoch - 4ms/step Epoch 17/300 283/283 - 1s - loss: 1.3879 - accuracy: 0.5457 - val_loss: 1.2724 - val_accuracy: 0.5910 - lr: 8.0000e-04 - 1s/epoch - 4ms/step Epoch 18/300 283/283 - 1s - loss: 1.3554 - accuracy: 0.5600 - val_loss: 1.2767 - val_accuracy: 0.5823 - lr: 8.0000e-04 - 1s/epoch - 4ms/step Epoch 19/300 283/283 - 1s - loss: 1.3238 - accuracy: 0.5651 - val_loss: 1.2085 - val_accuracy: 0.6110 - lr: 8.0000e-04 - 1s/epoch - 4ms/step Epoch 20/300 283/283 - 1s - loss: 1.3134 - accuracy: 0.5683 - val_loss: 1.1897 - val_accuracy: 0.6010 - lr: 8.0000e-04 - 1s/epoch - 4ms/step Epoch 21/300 283/283 - 1s - loss: 1.2724 - accuracy: 0.5807 - val_loss: 1.1938 - val_accuracy: 0.6070 - lr: 6.4000e-04 - 1s/epoch - 4ms/step Epoch 22/300 283/283 - 1s - loss: 1.2618 - accuracy: 0.5851 - val_loss: 1.1338 - val_accuracy: 0.6370 - lr: 6.4000e-04 - 1s/epoch - 4ms/step Epoch 23/300 283/283 - 1s - loss: 1.2093 - accuracy: 0.6020 - val_loss: 1.1791 - val_accuracy: 0.6203 - lr: 6.4000e-04 - 1s/epoch - 4ms/step Epoch 24/300 283/283 - 1s - loss: 1.2154 - accuracy: 0.6023 - val_loss: 1.0927 - val_accuracy: 0.6487 - lr: 6.4000e-04 - 1s/epoch - 4ms/step Epoch 25/300 283/283 - 1s - loss: 1.1767 - accuracy: 0.6151 - val_loss: 1.0704 - val_accuracy: 0.6620 - lr: 6.4000e-04 - 1s/epoch - 4ms/step Epoch 26/300 283/283 - 1s - loss: 1.1437 - accuracy: 0.6164 - val_loss: 
1.1386 - val_accuracy: 0.6387 - lr: 6.4000e-04 - 1s/epoch - 4ms/step Epoch 27/300 283/283 - 1s - loss: 1.1682 - accuracy: 0.6153 - val_loss: 1.2084 - val_accuracy: 0.6213 - lr: 6.4000e-04 - 1s/epoch - 4ms/step Epoch 28/300 283/283 - 1s - loss: 1.1541 - accuracy: 0.6179 - val_loss: 1.0421 - val_accuracy: 0.6687 - lr: 6.4000e-04 - 1s/epoch - 4ms/step Epoch 29/300 283/283 - 1s - loss: 1.1182 - accuracy: 0.6257 - val_loss: 1.1686 - val_accuracy: 0.6317 - lr: 6.4000e-04 - 1s/epoch - 5ms/step Epoch 30/300 283/283 - 1s - loss: 1.1149 - accuracy: 0.6270 - val_loss: 1.1002 - val_accuracy: 0.6447 - lr: 6.4000e-04 - 1s/epoch - 5ms/step Epoch 31/300 283/283 - 1s - loss: 1.0759 - accuracy: 0.6388 - val_loss: 1.0610 - val_accuracy: 0.6683 - lr: 5.1200e-04 - 1s/epoch - 4ms/step Epoch 32/300 283/283 - 1s - loss: 1.0744 - accuracy: 0.6400 - val_loss: 1.1071 - val_accuracy: 0.6510 - lr: 5.1200e-04 - 1s/epoch - 4ms/step Epoch 33/300 283/283 - 1s - loss: 1.0651 - accuracy: 0.6490 - val_loss: 0.9978 - val_accuracy: 0.6863 - lr: 5.1200e-04 - 1s/epoch - 4ms/step Epoch 34/300 283/283 - 1s - loss: 1.0553 - accuracy: 0.6492 - val_loss: 1.0409 - val_accuracy: 0.6663 - lr: 5.1200e-04 - 1s/epoch - 4ms/step Epoch 35/300 283/283 - 1s - loss: 1.0477 - accuracy: 0.6533 - val_loss: 0.9910 - val_accuracy: 0.6873 - lr: 5.1200e-04 - 1s/epoch - 4ms/step Epoch 36/300 283/283 - 1s - loss: 1.0201 - accuracy: 0.6576 - val_loss: 0.9862 - val_accuracy: 0.6867 - lr: 5.1200e-04 - 1s/epoch - 4ms/step Epoch 37/300 283/283 - 1s - loss: 1.0305 - accuracy: 0.6577 - val_loss: 1.0031 - val_accuracy: 0.6833 - lr: 5.1200e-04 - 1s/epoch - 4ms/step Epoch 38/300 283/283 - 1s - loss: 0.9966 - accuracy: 0.6669 - val_loss: 1.0698 - val_accuracy: 0.6690 - lr: 5.1200e-04 - 1s/epoch - 4ms/step Epoch 39/300 283/283 - 1s - loss: 0.9981 - accuracy: 0.6661 - val_loss: 0.9631 - val_accuracy: 0.7000 - lr: 5.1200e-04 - 1s/epoch - 4ms/step Epoch 40/300 283/283 - 1s - loss: 0.9859 - accuracy: 0.6711 - val_loss: 1.0407 - val_accuracy: 0.6727 - lr: 5.1200e-04 - 1s/epoch - 4ms/step Epoch 41/300 283/283 - 1s - loss: 0.9519 - accuracy: 0.6765 - val_loss: 0.9758 - val_accuracy: 0.6930 - lr: 4.0960e-04 - 1s/epoch - 4ms/step Epoch 42/300 283/283 - 1s - loss: 0.9337 - accuracy: 0.6841 - val_loss: 1.0135 - val_accuracy: 0.6833 - lr: 4.0960e-04 - 1s/epoch - 4ms/step Epoch 43/300 283/283 - 1s - loss: 0.9454 - accuracy: 0.6815 - val_loss: 0.9429 - val_accuracy: 0.7113 - lr: 4.0960e-04 - 1s/epoch - 4ms/step Epoch 44/300 283/283 - 1s - loss: 0.9498 - accuracy: 0.6778 - val_loss: 0.9800 - val_accuracy: 0.6923 - lr: 4.0960e-04 - 1s/epoch - 4ms/step Epoch 45/300 283/283 - 1s - loss: 0.9292 - accuracy: 0.6831 - val_loss: 0.9825 - val_accuracy: 0.6937 - lr: 4.0960e-04 - 1s/epoch - 4ms/step Epoch 46/300 283/283 - 1s - loss: 0.9357 - accuracy: 0.6810 - val_loss: 0.9067 - val_accuracy: 0.7137 - lr: 4.0960e-04 - 1s/epoch - 4ms/step Epoch 47/300 283/283 - 1s - loss: 0.8997 - accuracy: 0.6877 - val_loss: 0.9716 - val_accuracy: 0.7017 - lr: 4.0960e-04 - 1s/epoch - 4ms/step Epoch 48/300 283/283 - 1s - loss: 0.9119 - accuracy: 0.6924 - val_loss: 0.9313 - val_accuracy: 0.7097 - lr: 4.0960e-04 - 1s/epoch - 4ms/step Epoch 49/300 283/283 - 1s - loss: 0.8961 - accuracy: 0.6982 - val_loss: 0.9830 - val_accuracy: 0.7043 - lr: 4.0960e-04 - 1s/epoch - 4ms/step Epoch 50/300 283/283 - 1s - loss: 0.8786 - accuracy: 0.6955 - val_loss: 1.0134 - val_accuracy: 0.6947 - lr: 4.0960e-04 - 1s/epoch - 4ms/step Epoch 51/300 283/283 - 1s - loss: 0.8559 - accuracy: 0.7054 - val_loss: 0.9076 - val_accuracy: 0.7243 
- lr: 3.2768e-04 - 1s/epoch - 4ms/step Epoch 52/300 283/283 - 1s - loss: 0.8579 - accuracy: 0.7074 - val_loss: 0.8980 - val_accuracy: 0.7213 - lr: 3.2768e-04 - 1s/epoch - 4ms/step Epoch 53/300 283/283 - 1s - loss: 0.8676 - accuracy: 0.7044 - val_loss: 0.8940 - val_accuracy: 0.7293 - lr: 3.2768e-04 - 1s/epoch - 4ms/step Epoch 54/300 283/283 - 1s - loss: 0.8690 - accuracy: 0.7043 - val_loss: 0.9082 - val_accuracy: 0.7173 - lr: 3.2768e-04 - 1s/epoch - 4ms/step Epoch 55/300 283/283 - 1s - loss: 0.8432 - accuracy: 0.7082 - val_loss: 0.9318 - val_accuracy: 0.7183 - lr: 3.2768e-04 - 1s/epoch - 4ms/step Epoch 56/300 283/283 - 1s - loss: 0.8467 - accuracy: 0.7103 - val_loss: 0.9698 - val_accuracy: 0.7090 - lr: 3.2768e-04 - 1s/epoch - 4ms/step Epoch 57/300 283/283 - 1s - loss: 0.8302 - accuracy: 0.7175 - val_loss: 0.9555 - val_accuracy: 0.7133 - lr: 3.2768e-04 - 1s/epoch - 4ms/step Epoch 58/300 283/283 - 1s - loss: 0.8391 - accuracy: 0.7117 - val_loss: 0.9670 - val_accuracy: 0.7033 - lr: 3.2768e-04 - 1s/epoch - 4ms/step Epoch 59/300 283/283 - 1s - loss: 0.8249 - accuracy: 0.7126 - val_loss: 0.9110 - val_accuracy: 0.7247 - lr: 3.2768e-04 - 1s/epoch - 4ms/step Epoch 60/300 283/283 - 1s - loss: 0.8282 - accuracy: 0.7189 - val_loss: 0.9228 - val_accuracy: 0.7243 - lr: 3.2768e-04 - 1s/epoch - 4ms/step Epoch 61/300 283/283 - 1s - loss: 0.8170 - accuracy: 0.7220 - val_loss: 0.8991 - val_accuracy: 0.7320 - lr: 2.6214e-04 - 1s/epoch - 4ms/step Epoch 62/300 283/283 - 1s - loss: 0.8038 - accuracy: 0.7202 - val_loss: 0.9298 - val_accuracy: 0.7260 - lr: 2.6214e-04 - 1s/epoch - 4ms/step Epoch 63/300 283/283 - 1s - loss: 0.8110 - accuracy: 0.7205 - val_loss: 0.8978 - val_accuracy: 0.7310 - lr: 2.6214e-04 - 1s/epoch - 4ms/step Epoch 64/300 283/283 - 1s - loss: 0.7965 - accuracy: 0.7272 - val_loss: 0.8924 - val_accuracy: 0.7287 - lr: 2.6214e-04 - 1s/epoch - 4ms/step Epoch 65/300 283/283 - 1s - loss: 0.7776 - accuracy: 0.7301 - val_loss: 0.8942 - val_accuracy: 0.7397 - lr: 2.6214e-04 - 1s/epoch - 4ms/step Epoch 66/300 283/283 - 1s - loss: 0.7934 - accuracy: 0.7264 - val_loss: 0.9054 - val_accuracy: 0.7290 - lr: 2.6214e-04 - 1s/epoch - 4ms/step Epoch 67/300 283/283 - 1s - loss: 0.7787 - accuracy: 0.7342 - val_loss: 0.8894 - val_accuracy: 0.7360 - lr: 2.6214e-04 - 1s/epoch - 4ms/step Epoch 68/300 283/283 - 1s - loss: 0.7944 - accuracy: 0.7260 - val_loss: 0.9075 - val_accuracy: 0.7347 - lr: 2.6214e-04 - 1s/epoch - 4ms/step Epoch 69/300 283/283 - 1s - loss: 0.7826 - accuracy: 0.7281 - val_loss: 0.9049 - val_accuracy: 0.7377 - lr: 2.6214e-04 - 1s/epoch - 4ms/step Epoch 70/300 283/283 - 1s - loss: 0.7961 - accuracy: 0.7262 - val_loss: 0.9088 - val_accuracy: 0.7310 - lr: 2.6214e-04 - 1s/epoch - 4ms/step Epoch 71/300 283/283 - 1s - loss: 0.7512 - accuracy: 0.7374 - val_loss: 0.8849 - val_accuracy: 0.7387 - lr: 2.0972e-04 - 1s/epoch - 4ms/step Epoch 72/300 283/283 - 1s - loss: 0.7471 - accuracy: 0.7446 - val_loss: 0.8757 - val_accuracy: 0.7427 - lr: 2.0972e-04 - 1s/epoch - 4ms/step Epoch 73/300 283/283 - 1s - loss: 0.7457 - accuracy: 0.7418 - val_loss: 0.9002 - val_accuracy: 0.7293 - lr: 2.0972e-04 - 1s/epoch - 4ms/step Epoch 74/300 283/283 - 1s - loss: 0.7565 - accuracy: 0.7327 - val_loss: 0.8542 - val_accuracy: 0.7453 - lr: 2.0972e-04 - 1s/epoch - 4ms/step Epoch 75/300 283/283 - 1s - loss: 0.7500 - accuracy: 0.7432 - val_loss: 0.8801 - val_accuracy: 0.7400 - lr: 2.0972e-04 - 1s/epoch - 4ms/step Epoch 76/300 283/283 - 1s - loss: 0.7535 - accuracy: 0.7366 - val_loss: 0.9296 - val_accuracy: 0.7297 - lr: 2.0972e-04 - 1s/epoch - 
5ms/step Epoch 77/300 283/283 - 1s - loss: 0.7480 - accuracy: 0.7399 - val_loss: 0.9334 - val_accuracy: 0.7307 - lr: 2.0972e-04 - 1s/epoch - 4ms/step Epoch 78/300 283/283 - 1s - loss: 0.7472 - accuracy: 0.7396 - val_loss: 0.8630 - val_accuracy: 0.7443 - lr: 2.0972e-04 - 1s/epoch - 4ms/step Epoch 79/300 283/283 - 1s - loss: 0.7367 - accuracy: 0.7459 - val_loss: 0.8894 - val_accuracy: 0.7393 - lr: 2.0972e-04 - 1s/epoch - 4ms/step Epoch 80/300 283/283 - 1s - loss: 0.7234 - accuracy: 0.7497 - val_loss: 0.8701 - val_accuracy: 0.7453 - lr: 2.0972e-04 - 1s/epoch - 4ms/step Epoch 81/300 283/283 - 1s - loss: 0.7253 - accuracy: 0.7452 - val_loss: 0.8834 - val_accuracy: 0.7420 - lr: 1.6777e-04 - 1s/epoch - 4ms/step Epoch 82/300 283/283 - 1s - loss: 0.7195 - accuracy: 0.7487 - val_loss: 0.9345 - val_accuracy: 0.7263 - lr: 1.6777e-04 - 1s/epoch - 4ms/step Epoch 83/300 283/283 - 1s - loss: 0.7140 - accuracy: 0.7480 - val_loss: 0.8841 - val_accuracy: 0.7483 - lr: 1.6777e-04 - 1s/epoch - 4ms/step Epoch 84/300 283/283 - 1s - loss: 0.7314 - accuracy: 0.7450 - val_loss: 0.8675 - val_accuracy: 0.7480 - lr: 1.6777e-04 - 1s/epoch - 4ms/step Epoch 85/300 283/283 - 1s - loss: 0.7192 - accuracy: 0.7527 - val_loss: 0.8897 - val_accuracy: 0.7420 - lr: 1.6777e-04 - 1s/epoch - 4ms/step Epoch 86/300 283/283 - 1s - loss: 0.7224 - accuracy: 0.7492 - val_loss: 0.8603 - val_accuracy: 0.7487 - lr: 1.6777e-04 - 1s/epoch - 4ms/step Epoch 87/300 283/283 - 1s - loss: 0.7237 - accuracy: 0.7455 - val_loss: 0.8768 - val_accuracy: 0.7503 - lr: 1.6777e-04 - 1s/epoch - 4ms/step Epoch 88/300 283/283 - 1s - loss: 0.7029 - accuracy: 0.7591 - val_loss: 0.8919 - val_accuracy: 0.7413 - lr: 1.6777e-04 - 1s/epoch - 4ms/step Epoch 89/300 283/283 - 1s - loss: 0.7186 - accuracy: 0.7428 - val_loss: 0.9010 - val_accuracy: 0.7407 - lr: 1.6777e-04 - 1s/epoch - 4ms/step Epoch 90/300 283/283 - 1s - loss: 0.6969 - accuracy: 0.7545 - val_loss: 0.8847 - val_accuracy: 0.7457 - lr: 1.6777e-04 - 1s/epoch - 4ms/step Epoch 91/300 283/283 - 1s - loss: 0.6935 - accuracy: 0.7555 - val_loss: 0.9366 - val_accuracy: 0.7390 - lr: 1.3422e-04 - 1s/epoch - 4ms/step Epoch 92/300 283/283 - 1s - loss: 0.6867 - accuracy: 0.7627 - val_loss: 0.8943 - val_accuracy: 0.7447 - lr: 1.3422e-04 - 1s/epoch - 4ms/step Epoch 93/300 283/283 - 1s - loss: 0.7013 - accuracy: 0.7533 - val_loss: 0.9000 - val_accuracy: 0.7530 - lr: 1.3422e-04 - 1s/epoch - 4ms/step Epoch 94/300 283/283 - 1s - loss: 0.6989 - accuracy: 0.7605 - val_loss: 0.8869 - val_accuracy: 0.7477 - lr: 1.3422e-04 - 1s/epoch - 4ms/step Epoch 95/300 283/283 - 1s - loss: 0.6950 - accuracy: 0.7586 - val_loss: 0.8592 - val_accuracy: 0.7530 - lr: 1.3422e-04 - 1s/epoch - 4ms/step Epoch 96/300 283/283 - 1s - loss: 0.6842 - accuracy: 0.7620 - val_loss: 0.9051 - val_accuracy: 0.7483 - lr: 1.3422e-04 - 1s/epoch - 4ms/step Epoch 97/300 283/283 - 1s - loss: 0.6771 - accuracy: 0.7607 - val_loss: 0.9001 - val_accuracy: 0.7477 - lr: 1.3422e-04 - 1s/epoch - 4ms/step Epoch 98/300 283/283 - 1s - loss: 0.6728 - accuracy: 0.7656 - val_loss: 0.8863 - val_accuracy: 0.7530 - lr: 1.3422e-04 - 1s/epoch - 4ms/step Epoch 99/300 283/283 - 1s - loss: 0.6866 - accuracy: 0.7621 - val_loss: 0.8832 - val_accuracy: 0.7493 - lr: 1.3422e-04 - 1s/epoch - 4ms/step Epoch 100/300 283/283 - 1s - loss: 0.6739 - accuracy: 0.7655 - val_loss: 0.8887 - val_accuracy: 0.7470 - lr: 1.3422e-04 - 1s/epoch - 4ms/step Epoch 101/300 283/283 - 1s - loss: 0.6644 - accuracy: 0.7656 - val_loss: 0.9022 - val_accuracy: 0.7507 - lr: 1.0737e-04 - 1s/epoch - 4ms/step Epoch 102/300 
283/283 - 1s - loss: 0.6778 - accuracy: 0.7636 - val_loss: 0.8725 - val_accuracy: 0.7533 - lr: 1.0737e-04 - 1s/epoch - 4ms/step Epoch 103/300 283/283 - 1s - loss: 0.6579 - accuracy: 0.7674 - val_loss: 0.8940 - val_accuracy: 0.7557 - lr: 1.0737e-04 - 1s/epoch - 4ms/step Epoch 104/300 283/283 - 1s - loss: 0.6925 - accuracy: 0.7563 - val_loss: 0.8806 - val_accuracy: 0.7513 - lr: 1.0737e-04 - 1s/epoch - 4ms/step Epoch 105/300 283/283 - 1s - loss: 0.6623 - accuracy: 0.7687 - val_loss: 0.9102 - val_accuracy: 0.7473 - lr: 1.0737e-04 - 1s/epoch - 4ms/step Epoch 106/300 283/283 - 1s - loss: 0.6648 - accuracy: 0.7671 - val_loss: 0.8614 - val_accuracy: 0.7550 - lr: 1.0737e-04 - 1s/epoch - 4ms/step Epoch 107/300 283/283 - 1s - loss: 0.6609 - accuracy: 0.7720 - val_loss: 0.8816 - val_accuracy: 0.7550 - lr: 1.0737e-04 - 1s/epoch - 4ms/step Epoch 108/300 283/283 - 1s - loss: 0.6712 - accuracy: 0.7687 - val_loss: 0.8857 - val_accuracy: 0.7573 - lr: 1.0737e-04 - 1s/epoch - 4ms/step Epoch 109/300 283/283 - 1s - loss: 0.6723 - accuracy: 0.7650 - val_loss: 0.8914 - val_accuracy: 0.7537 - lr: 1.0737e-04 - 1s/epoch - 4ms/step Epoch 110/300 283/283 - 1s - loss: 0.6712 - accuracy: 0.7653 - val_loss: 0.9171 - val_accuracy: 0.7500 - lr: 1.0737e-04 - 1s/epoch - 4ms/step Epoch 111/300 283/283 - 1s - loss: 0.6652 - accuracy: 0.7648 - val_loss: 0.8705 - val_accuracy: 0.7587 - lr: 8.5899e-05 - 1s/epoch - 4ms/step Epoch 112/300 283/283 - 1s - loss: 0.6585 - accuracy: 0.7695 - val_loss: 0.8981 - val_accuracy: 0.7553 - lr: 8.5899e-05 - 1s/epoch - 4ms/step Epoch 113/300 283/283 - 1s - loss: 0.6582 - accuracy: 0.7678 - val_loss: 0.8849 - val_accuracy: 0.7563 - lr: 8.5899e-05 - 1s/epoch - 4ms/step Epoch 114/300 283/283 - 1s - loss: 0.6536 - accuracy: 0.7679 - val_loss: 0.8992 - val_accuracy: 0.7530 - lr: 8.5899e-05 - 1s/epoch - 4ms/step Epoch 115/300 283/283 - 1s - loss: 0.6452 - accuracy: 0.7768 - val_loss: 0.9003 - val_accuracy: 0.7520 - lr: 8.5899e-05 - 1s/epoch - 4ms/step Epoch 116/300 283/283 - 1s - loss: 0.6428 - accuracy: 0.7743 - val_loss: 0.8728 - val_accuracy: 0.7617 - lr: 8.5899e-05 - 1s/epoch - 4ms/step Epoch 117/300 283/283 - 1s - loss: 0.6430 - accuracy: 0.7725 - val_loss: 0.8787 - val_accuracy: 0.7583 - lr: 8.5899e-05 - 1s/epoch - 4ms/step Epoch 118/300 283/283 - 1s - loss: 0.6525 - accuracy: 0.7736 - val_loss: 0.8786 - val_accuracy: 0.7553 - lr: 8.5899e-05 - 1s/epoch - 4ms/step Epoch 119/300 283/283 - 1s - loss: 0.6453 - accuracy: 0.7685 - val_loss: 0.8797 - val_accuracy: 0.7570 - lr: 8.5899e-05 - 1s/epoch - 4ms/step Epoch 120/300 283/283 - 1s - loss: 0.6469 - accuracy: 0.7740 - val_loss: 0.8791 - val_accuracy: 0.7597 - lr: 8.5899e-05 - 1s/epoch - 4ms/step Epoch 121/300 283/283 - 1s - loss: 0.6409 - accuracy: 0.7727 - val_loss: 0.8851 - val_accuracy: 0.7583 - lr: 6.8719e-05 - 1s/epoch - 4ms/step Epoch 122/300 283/283 - 1s - loss: 0.6378 - accuracy: 0.7718 - val_loss: 0.8880 - val_accuracy: 0.7517 - lr: 6.8719e-05 - 1s/epoch - 4ms/step Epoch 123/300 283/283 - 1s - loss: 0.6239 - accuracy: 0.7753 - val_loss: 0.8931 - val_accuracy: 0.7540 - lr: 6.8719e-05 - 1s/epoch - 4ms/step Epoch 124/300 283/283 - 1s - loss: 0.6487 - accuracy: 0.7736 - val_loss: 0.8846 - val_accuracy: 0.7590 - lr: 6.8719e-05 - 1s/epoch - 4ms/step Epoch 125/300 283/283 - 1s - loss: 0.6386 - accuracy: 0.7733 - val_loss: 0.8910 - val_accuracy: 0.7597 - lr: 6.8719e-05 - 1s/epoch - 4ms/step Epoch 126/300 283/283 - 1s - loss: 0.6421 - accuracy: 0.7743 - val_loss: 0.8778 - val_accuracy: 0.7580 - lr: 6.8719e-05 - 1s/epoch - 4ms/step Epoch 127/300 
283/283 - 1s - loss: 0.6451 - accuracy: 0.7740 - val_loss: 0.8965 - val_accuracy: 0.7567 - lr: 6.8719e-05 - 1s/epoch - 5ms/step Epoch 128/300 283/283 - 1s - loss: 0.6446 - accuracy: 0.7780 - val_loss: 0.8806 - val_accuracy: 0.7587 - lr: 6.8719e-05 - 1s/epoch - 4ms/step Epoch 129/300 283/283 - 1s - loss: 0.6302 - accuracy: 0.7716 - val_loss: 0.8832 - val_accuracy: 0.7567 - lr: 6.8719e-05 - 1s/epoch - 4ms/step Epoch 130/300 283/283 - 1s - loss: 0.6305 - accuracy: 0.7743 - val_loss: 0.8987 - val_accuracy: 0.7587 - lr: 6.8719e-05 - 1s/epoch - 4ms/step Epoch 131/300 283/283 - 1s - loss: 0.6144 - accuracy: 0.7828 - val_loss: 0.8702 - val_accuracy: 0.7640 - lr: 5.4976e-05 - 1s/epoch - 4ms/step Epoch 132/300 283/283 - 1s - loss: 0.6274 - accuracy: 0.7818 - val_loss: 0.8938 - val_accuracy: 0.7557 - lr: 5.4976e-05 - 1s/epoch - 4ms/step Epoch 133/300 283/283 - 1s - loss: 0.6287 - accuracy: 0.7806 - val_loss: 0.8969 - val_accuracy: 0.7597 - lr: 5.4976e-05 - 1s/epoch - 4ms/step Epoch 134/300 283/283 - 1s - loss: 0.6209 - accuracy: 0.7778 - val_loss: 0.8982 - val_accuracy: 0.7540 - lr: 5.4976e-05 - 1s/epoch - 4ms/step Epoch 135/300 283/283 - 1s - loss: 0.6341 - accuracy: 0.7775 - val_loss: 0.8882 - val_accuracy: 0.7590 - lr: 5.4976e-05 - 1s/epoch - 4ms/step Epoch 136/300 283/283 - 1s - loss: 0.6292 - accuracy: 0.7771 - val_loss: 0.8871 - val_accuracy: 0.7603 - lr: 5.4976e-05 - 1s/epoch - 4ms/step Epoch 137/300 283/283 - 1s - loss: 0.6217 - accuracy: 0.7822 - val_loss: 0.8869 - val_accuracy: 0.7597 - lr: 5.4976e-05 - 1s/epoch - 4ms/step Epoch 138/300 283/283 - 1s - loss: 0.6258 - accuracy: 0.7788 - val_loss: 0.8977 - val_accuracy: 0.7587 - lr: 5.4976e-05 - 1s/epoch - 4ms/step Epoch 139/300 283/283 - 1s - loss: 0.6246 - accuracy: 0.7792 - val_loss: 0.8713 - val_accuracy: 0.7637 - lr: 5.4976e-05 - 1s/epoch - 4ms/step Epoch 140/300 283/283 - 1s - loss: 0.6211 - accuracy: 0.7813 - val_loss: 0.8937 - val_accuracy: 0.7597 - lr: 5.4976e-05 - 1s/epoch - 4ms/step Epoch 141/300 283/283 - 1s - loss: 0.6169 - accuracy: 0.7843 - val_loss: 0.9039 - val_accuracy: 0.7567 - lr: 4.3980e-05 - 1s/epoch - 4ms/step Epoch 142/300 283/283 - 1s - loss: 0.6297 - accuracy: 0.7789 - val_loss: 0.8965 - val_accuracy: 0.7580 - lr: 4.3980e-05 - 1s/epoch - 4ms/step Epoch 143/300 283/283 - 1s - loss: 0.6181 - accuracy: 0.7850 - val_loss: 0.8959 - val_accuracy: 0.7593 - lr: 4.3980e-05 - 1s/epoch - 4ms/step Epoch 144/300 283/283 - 1s - loss: 0.6246 - accuracy: 0.7818 - val_loss: 0.8868 - val_accuracy: 0.7580 - lr: 4.3980e-05 - 1s/epoch - 4ms/step Epoch 145/300 283/283 - 1s - loss: 0.6113 - accuracy: 0.7843 - val_loss: 0.8985 - val_accuracy: 0.7600 - lr: 4.3980e-05 - 1s/epoch - 4ms/step Epoch 146/300 283/283 - 1s - loss: 0.6199 - accuracy: 0.7797 - val_loss: 0.9074 - val_accuracy: 0.7523 - lr: 4.3980e-05 - 1s/epoch - 4ms/step Epoch 147/300 283/283 - 1s - loss: 0.6006 - accuracy: 0.7832 - val_loss: 0.8993 - val_accuracy: 0.7597 - lr: 4.3980e-05 - 1s/epoch - 4ms/step Epoch 148/300 283/283 - 1s - loss: 0.6080 - accuracy: 0.7847 - val_loss: 0.8973 - val_accuracy: 0.7583 - lr: 4.3980e-05 - 1s/epoch - 4ms/step Epoch 149/300 283/283 - 1s - loss: 0.6108 - accuracy: 0.7811 - val_loss: 0.8921 - val_accuracy: 0.7587 - lr: 4.3980e-05 - 1s/epoch - 4ms/step Epoch 150/300 283/283 - 1s - loss: 0.6108 - accuracy: 0.7875 - val_loss: 0.8732 - val_accuracy: 0.7623 - lr: 4.3980e-05 - 1s/epoch - 4ms/step Epoch 151/300 283/283 - 1s - loss: 0.6182 - accuracy: 0.7823 - val_loss: 0.9034 - val_accuracy: 0.7563 - lr: 3.5184e-05 - 1s/epoch - 4ms/step Epoch 152/300 
283/283 - 1s - loss: 0.6114 - accuracy: 0.7844 - val_loss: 0.9017 - val_accuracy: 0.7557 - lr: 3.5184e-05 - 1s/epoch - 4ms/step Epoch 153/300 283/283 - 1s - loss: 0.6148 - accuracy: 0.7821 - val_loss: 0.8934 - val_accuracy: 0.7590 - lr: 3.5184e-05 - 1s/epoch - 4ms/step Epoch 154/300 283/283 - 1s - loss: 0.6000 - accuracy: 0.7825 - val_loss: 0.8952 - val_accuracy: 0.7597 - lr: 3.5184e-05 - 1s/epoch - 4ms/step Epoch 155/300 283/283 - 1s - loss: 0.6173 - accuracy: 0.7812 - val_loss: 0.8941 - val_accuracy: 0.7630 - lr: 3.5184e-05 - 1s/epoch - 4ms/step Epoch 156/300 283/283 - 1s - loss: 0.6213 - accuracy: 0.7839 - val_loss: 0.8686 - val_accuracy: 0.7647 - lr: 3.5184e-05 - 1s/epoch - 4ms/step Epoch 157/300 283/283 - 1s - loss: 0.5949 - accuracy: 0.7908 - val_loss: 0.8970 - val_accuracy: 0.7563 - lr: 3.5184e-05 - 1s/epoch - 4ms/step Epoch 158/300 283/283 - 1s - loss: 0.6071 - accuracy: 0.7941 - val_loss: 0.9025 - val_accuracy: 0.7570 - lr: 3.5184e-05 - 1s/epoch - 4ms/step Epoch 159/300 283/283 - 1s - loss: 0.6042 - accuracy: 0.7820 - val_loss: 0.8829 - val_accuracy: 0.7593 - lr: 3.5184e-05 - 1s/epoch - 4ms/step Epoch 160/300 283/283 - 1s - loss: 0.6154 - accuracy: 0.7806 - val_loss: 0.9066 - val_accuracy: 0.7573 - lr: 3.5184e-05 - 1s/epoch - 4ms/step Epoch 161/300 283/283 - 1s - loss: 0.6107 - accuracy: 0.7822 - val_loss: 0.8867 - val_accuracy: 0.7610 - lr: 2.8147e-05 - 1s/epoch - 4ms/step Epoch 162/300 283/283 - 1s - loss: 0.5950 - accuracy: 0.7858 - val_loss: 0.8991 - val_accuracy: 0.7577 - lr: 2.8147e-05 - 1s/epoch - 4ms/step Epoch 163/300 283/283 - 1s - loss: 0.6162 - accuracy: 0.7809 - val_loss: 0.8818 - val_accuracy: 0.7627 - lr: 2.8147e-05 - 1s/epoch - 4ms/step Epoch 164/300 283/283 - 1s - loss: 0.6020 - accuracy: 0.7871 - val_loss: 0.8875 - val_accuracy: 0.7617 - lr: 2.8147e-05 - 1s/epoch - 4ms/step Epoch 165/300 283/283 - 1s - loss: 0.6000 - accuracy: 0.7913 - val_loss: 0.8972 - val_accuracy: 0.7603 - lr: 2.8147e-05 - 1s/epoch - 4ms/step Epoch 166/300 283/283 - 1s - loss: 0.6036 - accuracy: 0.7877 - val_loss: 0.8881 - val_accuracy: 0.7597 - lr: 2.8147e-05 - 1s/epoch - 4ms/step Epoch 167/300 283/283 - 1s - loss: 0.5868 - accuracy: 0.7930 - val_loss: 0.9032 - val_accuracy: 0.7567 - lr: 2.8147e-05 - 1s/epoch - 4ms/step Epoch 168/300 283/283 - 1s - loss: 0.6058 - accuracy: 0.7859 - val_loss: 0.8823 - val_accuracy: 0.7623 - lr: 2.8147e-05 - 1s/epoch - 4ms/step Epoch 169/300 283/283 - 1s - loss: 0.6042 - accuracy: 0.7847 - val_loss: 0.8924 - val_accuracy: 0.7623 - lr: 2.8147e-05 - 1s/epoch - 4ms/step Epoch 170/300 283/283 - 1s - loss: 0.6085 - accuracy: 0.7849 - val_loss: 0.9077 - val_accuracy: 0.7567 - lr: 2.8147e-05 - 1s/epoch - 4ms/step Epoch 171/300 283/283 - 1s - loss: 0.6008 - accuracy: 0.7909 - val_loss: 0.8937 - val_accuracy: 0.7617 - lr: 2.2518e-05 - 1s/epoch - 4ms/step Epoch 172/300 283/283 - 1s - loss: 0.5921 - accuracy: 0.7936 - val_loss: 0.8955 - val_accuracy: 0.7590 - lr: 2.2518e-05 - 1s/epoch - 4ms/step Epoch 173/300 283/283 - 1s - loss: 0.5852 - accuracy: 0.7970 - val_loss: 0.9056 - val_accuracy: 0.7590 - lr: 2.2518e-05 - 1s/epoch - 4ms/step Epoch 174/300 283/283 - 1s - loss: 0.5997 - accuracy: 0.7920 - val_loss: 0.8867 - val_accuracy: 0.7613 - lr: 2.2518e-05 - 1s/epoch - 4ms/step Epoch 175/300 283/283 - 1s - loss: 0.6057 - accuracy: 0.7895 - val_loss: 0.9011 - val_accuracy: 0.7587 - lr: 2.2518e-05 - 1s/epoch - 4ms/step Epoch 176/300 283/283 - 1s - loss: 0.5886 - accuracy: 0.7880 - val_loss: 0.9002 - val_accuracy: 0.7600 - lr: 2.2518e-05 - 1s/epoch - 4ms/step Epoch 177/300 
283/283 - 1s - loss: 0.6055 - accuracy: 0.7826 - val_loss: 0.8951 - val_accuracy: 0.7630 - lr: 2.2518e-05 - 1s/epoch - 4ms/step Epoch 178/300 283/283 - 1s - loss: 0.5954 - accuracy: 0.7877 - val_loss: 0.9010 - val_accuracy: 0.7600 - lr: 2.2518e-05 - 1s/epoch - 4ms/step Epoch 179/300 283/283 - 1s - loss: 0.5960 - accuracy: 0.7868 - val_loss: 0.8921 - val_accuracy: 0.7623 - lr: 2.2518e-05 - 1s/epoch - 4ms/step Epoch 180/300 283/283 - 1s - loss: 0.6008 - accuracy: 0.7920 - val_loss: 0.9004 - val_accuracy: 0.7597 - lr: 2.2518e-05 - 1s/epoch - 4ms/step Epoch 181/300 283/283 - 1s - loss: 0.5972 - accuracy: 0.7887 - val_loss: 0.8961 - val_accuracy: 0.7607 - lr: 1.8014e-05 - 1s/epoch - 4ms/step Epoch 182/300 283/283 - 1s - loss: 0.5899 - accuracy: 0.7945 - val_loss: 0.8909 - val_accuracy: 0.7620 - lr: 1.8014e-05 - 1s/epoch - 4ms/step Epoch 183/300 283/283 - 1s - loss: 0.6057 - accuracy: 0.7887 - val_loss: 0.9042 - val_accuracy: 0.7597 - lr: 1.8014e-05 - 1s/epoch - 4ms/step Epoch 184/300 283/283 - 1s - loss: 0.5889 - accuracy: 0.7908 - val_loss: 0.9026 - val_accuracy: 0.7580 - lr: 1.8014e-05 - 1s/epoch - 4ms/step Epoch 185/300 283/283 - 1s - loss: 0.5966 - accuracy: 0.7867 - val_loss: 0.9096 - val_accuracy: 0.7600 - lr: 1.8014e-05 - 1s/epoch - 4ms/step Epoch 186/300 283/283 - 1s - loss: 0.5902 - accuracy: 0.7924 - val_loss: 0.8922 - val_accuracy: 0.7627 - lr: 1.8014e-05 - 1s/epoch - 4ms/step Epoch 187/300 283/283 - 1s - loss: 0.5950 - accuracy: 0.7935 - val_loss: 0.8979 - val_accuracy: 0.7613 - lr: 1.8014e-05 - 1s/epoch - 4ms/step Epoch 188/300 283/283 - 1s - loss: 0.5884 - accuracy: 0.7916 - val_loss: 0.8888 - val_accuracy: 0.7643 - lr: 1.8014e-05 - 1s/epoch - 4ms/step Epoch 189/300 283/283 - 1s - loss: 0.5922 - accuracy: 0.7864 - val_loss: 0.8961 - val_accuracy: 0.7630 - lr: 1.8014e-05 - 1s/epoch - 4ms/step Epoch 190/300 283/283 - 1s - loss: 0.6104 - accuracy: 0.7816 - val_loss: 0.9134 - val_accuracy: 0.7597 - lr: 1.8014e-05 - 1s/epoch - 4ms/step Epoch 191/300 283/283 - 1s - loss: 0.5990 - accuracy: 0.7919 - val_loss: 0.8921 - val_accuracy: 0.7627 - lr: 1.4412e-05 - 1s/epoch - 4ms/step Epoch 192/300 283/283 - 1s - loss: 0.5907 - accuracy: 0.7928 - val_loss: 0.8937 - val_accuracy: 0.7620 - lr: 1.4412e-05 - 1s/epoch - 4ms/step Epoch 193/300 283/283 - 1s - loss: 0.5920 - accuracy: 0.7901 - val_loss: 0.9015 - val_accuracy: 0.7610 - lr: 1.4412e-05 - 1s/epoch - 4ms/step Epoch 194/300 283/283 - 1s - loss: 0.5904 - accuracy: 0.7957 - val_loss: 0.8988 - val_accuracy: 0.7637 - lr: 1.4412e-05 - 1s/epoch - 4ms/step Epoch 195/300 283/283 - 1s - loss: 0.5948 - accuracy: 0.7884 - val_loss: 0.8992 - val_accuracy: 0.7607 - lr: 1.4412e-05 - 1s/epoch - 4ms/step Epoch 196/300 283/283 - 1s - loss: 0.5829 - accuracy: 0.7900 - val_loss: 0.8968 - val_accuracy: 0.7603 - lr: 1.4412e-05 - 1s/epoch - 4ms/step Epoch 197/300 283/283 - 1s - loss: 0.5874 - accuracy: 0.7944 - val_loss: 0.8958 - val_accuracy: 0.7620 - lr: 1.4412e-05 - 1s/epoch - 4ms/step Epoch 198/300 283/283 - 1s - loss: 0.5955 - accuracy: 0.7905 - val_loss: 0.8925 - val_accuracy: 0.7613 - lr: 1.4412e-05 - 1s/epoch - 4ms/step Epoch 199/300 283/283 - 1s - loss: 0.5895 - accuracy: 0.7923 - val_loss: 0.8986 - val_accuracy: 0.7620 - lr: 1.4412e-05 - 1s/epoch - 4ms/step Epoch 200/300 283/283 - 1s - loss: 0.6009 - accuracy: 0.7846 - val_loss: 0.9024 - val_accuracy: 0.7610 - lr: 1.4412e-05 - 1s/epoch - 4ms/step Epoch 201/300 283/283 - 1s - loss: 0.5915 - accuracy: 0.7880 - val_loss: 0.8997 - val_accuracy: 0.7610 - lr: 1.1529e-05 - 1s/epoch - 4ms/step Epoch 202/300 
283/283 - 1s - loss: 0.5915 - accuracy: 0.7910 - val_loss: 0.8923 - val_accuracy: 0.7627 - lr: 1.1529e-05 - 1s/epoch - 4ms/step Epoch 203/300 283/283 - 1s - loss: 0.5967 - accuracy: 0.7930 - val_loss: 0.9090 - val_accuracy: 0.7603 - lr: 1.1529e-05 - 1s/epoch - 4ms/step Epoch 204/300 283/283 - 1s - loss: 0.5890 - accuracy: 0.7899 - val_loss: 0.9107 - val_accuracy: 0.7610 - lr: 1.1529e-05 - 1s/epoch - 4ms/step Epoch 205/300 283/283 - 1s - loss: 0.5898 - accuracy: 0.7858 - val_loss: 0.8959 - val_accuracy: 0.7633 - lr: 1.1529e-05 - 1s/epoch - 4ms/step Epoch 206/300 283/283 - 1s - loss: 0.6012 - accuracy: 0.7867 - val_loss: 0.9025 - val_accuracy: 0.7627 - lr: 1.1529e-05 - 1s/epoch - 4ms/step Epoch 207/300 283/283 - 1s - loss: 0.5883 - accuracy: 0.7944 - val_loss: 0.9047 - val_accuracy: 0.7620 - lr: 1.1529e-05 - 1s/epoch - 4ms/step Epoch 208/300 283/283 - 1s - loss: 0.5959 - accuracy: 0.7907 - val_loss: 0.9025 - val_accuracy: 0.7630 - lr: 1.1529e-05 - 1s/epoch - 4ms/step Epoch 209/300 283/283 - 1s - loss: 0.5921 - accuracy: 0.7933 - val_loss: 0.9014 - val_accuracy: 0.7610 - lr: 1.1529e-05 - 1s/epoch - 4ms/step Epoch 210/300 283/283 - 1s - loss: 0.5882 - accuracy: 0.7928 - val_loss: 0.8948 - val_accuracy: 0.7637 - lr: 1.1529e-05 - 1s/epoch - 4ms/step Epoch 211/300 283/283 - 1s - loss: 0.5898 - accuracy: 0.7952 - val_loss: 0.9024 - val_accuracy: 0.7613 - lr: 9.2234e-06 - 1s/epoch - 4ms/step Epoch 212/300 283/283 - 1s - loss: 0.5913 - accuracy: 0.7897 - val_loss: 0.8947 - val_accuracy: 0.7627 - lr: 9.2234e-06 - 1s/epoch - 4ms/step Epoch 213/300 283/283 - 1s - loss: 0.5896 - accuracy: 0.7944 - val_loss: 0.8981 - val_accuracy: 0.7620 - lr: 9.2234e-06 - 1s/epoch - 4ms/step Epoch 214/300 283/283 - 1s - loss: 0.5818 - accuracy: 0.7911 - val_loss: 0.8958 - val_accuracy: 0.7630 - lr: 9.2234e-06 - 1s/epoch - 4ms/step Epoch 215/300 283/283 - 1s - loss: 0.5916 - accuracy: 0.7959 - val_loss: 0.8951 - val_accuracy: 0.7623 - lr: 9.2234e-06 - 1s/epoch - 4ms/step Epoch 216/300 283/283 - 1s - loss: 0.5773 - accuracy: 0.7916 - val_loss: 0.8998 - val_accuracy: 0.7617 - lr: 9.2234e-06 - 1s/epoch - 4ms/step Epoch 217/300 283/283 - 1s - loss: 0.5951 - accuracy: 0.7869 - val_loss: 0.8933 - val_accuracy: 0.7630 - lr: 9.2234e-06 - 1s/epoch - 4ms/step Epoch 218/300 283/283 - 1s - loss: 0.5882 - accuracy: 0.7944 - val_loss: 0.8939 - val_accuracy: 0.7617 - lr: 9.2234e-06 - 1s/epoch - 4ms/step Epoch 219/300 283/283 - 1s - loss: 0.5999 - accuracy: 0.7889 - val_loss: 0.9029 - val_accuracy: 0.7597 - lr: 9.2234e-06 - 1s/epoch - 4ms/step Epoch 220/300 283/283 - 1s - loss: 0.5863 - accuracy: 0.7898 - val_loss: 0.9074 - val_accuracy: 0.7617 - lr: 9.2234e-06 - 1s/epoch - 4ms/step Epoch 221/300 283/283 - 1s - loss: 0.5828 - accuracy: 0.7936 - val_loss: 0.9002 - val_accuracy: 0.7607 - lr: 7.3787e-06 - 1s/epoch - 4ms/step Epoch 222/300 283/283 - 1s - loss: 0.5830 - accuracy: 0.7949 - val_loss: 0.8971 - val_accuracy: 0.7617 - lr: 7.3787e-06 - 1s/epoch - 4ms/step Epoch 223/300 283/283 - 1s - loss: 0.5810 - accuracy: 0.7936 - val_loss: 0.9001 - val_accuracy: 0.7620 - lr: 7.3787e-06 - 1s/epoch - 4ms/step Epoch 224/300 283/283 - 1s - loss: 0.5907 - accuracy: 0.7921 - val_loss: 0.9039 - val_accuracy: 0.7613 - lr: 7.3787e-06 - 1s/epoch - 4ms/step Epoch 225/300 283/283 - 1s - loss: 0.5874 - accuracy: 0.7936 - val_loss: 0.8979 - val_accuracy: 0.7597 - lr: 7.3787e-06 - 1s/epoch - 4ms/step Epoch 226/300 283/283 - 1s - loss: 0.5872 - accuracy: 0.7901 - val_loss: 0.8979 - val_accuracy: 0.7617 - lr: 7.3787e-06 - 1s/epoch - 4ms/step Epoch 227/300 
283/283 - 1s - loss: 0.5980 - accuracy: 0.7829 - val_loss: 0.8950 - val_accuracy: 0.7620 - lr: 7.3787e-06 - 1s/epoch - 4ms/step Epoch 228/300 283/283 - 1s - loss: 0.5715 - accuracy: 0.7977 - val_loss: 0.9006 - val_accuracy: 0.7620 - lr: 7.3787e-06 - 1s/epoch - 4ms/step Epoch 229/300 283/283 - 1s - loss: 0.5891 - accuracy: 0.7910 - val_loss: 0.8928 - val_accuracy: 0.7633 - lr: 7.3787e-06 - 1s/epoch - 4ms/step Epoch 230/300 283/283 - 1s - loss: 0.5916 - accuracy: 0.7950 - val_loss: 0.9008 - val_accuracy: 0.7600 - lr: 7.3787e-06 - 1s/epoch - 4ms/step Epoch 231/300 283/283 - 1s - loss: 0.5780 - accuracy: 0.7967 - val_loss: 0.8994 - val_accuracy: 0.7617 - lr: 5.9030e-06 - 1s/epoch - 4ms/step Epoch 232/300 283/283 - 1s - loss: 0.5909 - accuracy: 0.7928 - val_loss: 0.8974 - val_accuracy: 0.7643 - lr: 5.9030e-06 - 1s/epoch - 4ms/step Epoch 233/300 283/283 - 1s - loss: 0.6017 - accuracy: 0.7898 - val_loss: 0.8988 - val_accuracy: 0.7637 - lr: 5.9030e-06 - 1s/epoch - 4ms/step Epoch 234/300 283/283 - 1s - loss: 0.5803 - accuracy: 0.7967 - val_loss: 0.9003 - val_accuracy: 0.7633 - lr: 5.9030e-06 - 1s/epoch - 4ms/step Epoch 235/300 283/283 - 1s - loss: 0.5879 - accuracy: 0.7898 - val_loss: 0.8995 - val_accuracy: 0.7597 - lr: 5.9030e-06 - 1s/epoch - 4ms/step Epoch 236/300 283/283 - 1s - loss: 0.5835 - accuracy: 0.7971 - val_loss: 0.9029 - val_accuracy: 0.7607 - lr: 5.9030e-06 - 1s/epoch - 4ms/step Epoch 237/300 283/283 - 1s - loss: 0.5774 - accuracy: 0.7952 - val_loss: 0.9017 - val_accuracy: 0.7610 - lr: 5.9030e-06 - 1s/epoch - 4ms/step Epoch 238/300 283/283 - 1s - loss: 0.5820 - accuracy: 0.7933 - val_loss: 0.9026 - val_accuracy: 0.7610 - lr: 5.9030e-06 - 1s/epoch - 4ms/step Epoch 239/300 283/283 - 1s - loss: 0.5846 - accuracy: 0.7936 - val_loss: 0.9026 - val_accuracy: 0.7603 - lr: 5.9030e-06 - 1s/epoch - 4ms/step Epoch 240/300 283/283 - 1s - loss: 0.5881 - accuracy: 0.7914 - val_loss: 0.9054 - val_accuracy: 0.7607 - lr: 5.9030e-06 - 1s/epoch - 4ms/step Epoch 241/300 283/283 - 1s - loss: 0.5896 - accuracy: 0.7934 - val_loss: 0.9032 - val_accuracy: 0.7607 - lr: 4.7224e-06 - 1s/epoch - 4ms/step Epoch 242/300 283/283 - 1s - loss: 0.5846 - accuracy: 0.7967 - val_loss: 0.9041 - val_accuracy: 0.7597 - lr: 4.7224e-06 - 1s/epoch - 4ms/step Epoch 243/300 283/283 - 1s - loss: 0.5806 - accuracy: 0.7924 - val_loss: 0.8969 - val_accuracy: 0.7610 - lr: 4.7224e-06 - 1s/epoch - 4ms/step Epoch 244/300 283/283 - 1s - loss: 0.5915 - accuracy: 0.7977 - val_loss: 0.9037 - val_accuracy: 0.7613 - lr: 4.7224e-06 - 1s/epoch - 4ms/step Epoch 245/300 283/283 - 1s - loss: 0.5944 - accuracy: 0.7887 - val_loss: 0.8997 - val_accuracy: 0.7630 - lr: 4.7224e-06 - 1s/epoch - 4ms/step Epoch 246/300 283/283 - 1s - loss: 0.5804 - accuracy: 0.7945 - val_loss: 0.8977 - val_accuracy: 0.7623 - lr: 4.7224e-06 - 1s/epoch - 4ms/step Epoch 247/300 283/283 - 1s - loss: 0.5939 - accuracy: 0.7890 - val_loss: 0.9009 - val_accuracy: 0.7610 - lr: 4.7224e-06 - 1s/epoch - 4ms/step Epoch 248/300 283/283 - 1s - loss: 0.5743 - accuracy: 0.7915 - val_loss: 0.8989 - val_accuracy: 0.7617 - lr: 4.7224e-06 - 1s/epoch - 4ms/step Epoch 249/300 283/283 - 1s - loss: 0.5817 - accuracy: 0.7954 - val_loss: 0.9009 - val_accuracy: 0.7613 - lr: 4.7224e-06 - 1s/epoch - 4ms/step Epoch 250/300 283/283 - 1s - loss: 0.5881 - accuracy: 0.7940 - val_loss: 0.8992 - val_accuracy: 0.7633 - lr: 4.7224e-06 - 1s/epoch - 4ms/step Epoch 251/300 283/283 - 1s - loss: 0.5909 - accuracy: 0.7908 - val_loss: 0.9010 - val_accuracy: 0.7620 - lr: 3.7779e-06 - 1s/epoch - 4ms/step Epoch 252/300 
283/283 - 1s - loss: 0.5788 - accuracy: 0.7935 - val_loss: 0.9050 - val_accuracy: 0.7623 - lr: 3.7779e-06 - 1s/epoch - 4ms/step Epoch 253/300 283/283 - 1s - loss: 0.5866 - accuracy: 0.7935 - val_loss: 0.8995 - val_accuracy: 0.7627 - lr: 3.7779e-06 - 1s/epoch - 4ms/step Epoch 254/300 283/283 - 1s - loss: 0.5957 - accuracy: 0.7919 - val_loss: 0.9016 - val_accuracy: 0.7627 - lr: 3.7779e-06 - 1s/epoch - 4ms/step Epoch 255/300 283/283 - 1s - loss: 0.5767 - accuracy: 0.7928 - val_loss: 0.9020 - val_accuracy: 0.7630 - lr: 3.7779e-06 - 1s/epoch - 4ms/step Epoch 256/300 283/283 - 1s - loss: 0.5848 - accuracy: 0.7956 - val_loss: 0.9040 - val_accuracy: 0.7620 - lr: 3.7779e-06 - 1s/epoch - 4ms/step Epoch 257/300 283/283 - 1s - loss: 0.5755 - accuracy: 0.7994 - val_loss: 0.9000 - val_accuracy: 0.7620 - lr: 3.7779e-06 - 1s/epoch - 4ms/step Epoch 258/300 283/283 - 1s - loss: 0.5775 - accuracy: 0.7926 - val_loss: 0.9021 - val_accuracy: 0.7623 - lr: 3.7779e-06 - 1s/epoch - 4ms/step Epoch 259/300 283/283 - 1s - loss: 0.5871 - accuracy: 0.7919 - val_loss: 0.9019 - val_accuracy: 0.7640 - lr: 3.7779e-06 - 1s/epoch - 4ms/step Epoch 260/300 283/283 - 1s - loss: 0.5752 - accuracy: 0.7966 - val_loss: 0.9050 - val_accuracy: 0.7613 - lr: 3.7779e-06 - 1s/epoch - 4ms/step Epoch 261/300 283/283 - 1s - loss: 0.5740 - accuracy: 0.7986 - val_loss: 0.8996 - val_accuracy: 0.7613 - lr: 3.0223e-06 - 1s/epoch - 4ms/step Epoch 262/300 283/283 - 1s - loss: 0.5828 - accuracy: 0.7974 - val_loss: 0.8994 - val_accuracy: 0.7633 - lr: 3.0223e-06 - 1s/epoch - 4ms/step Epoch 263/300 283/283 - 1s - loss: 0.5895 - accuracy: 0.7929 - val_loss: 0.9033 - val_accuracy: 0.7620 - lr: 3.0223e-06 - 1s/epoch - 4ms/step Epoch 264/300 283/283 - 1s - loss: 0.5752 - accuracy: 0.7961 - val_loss: 0.9018 - val_accuracy: 0.7627 - lr: 3.0223e-06 - 1s/epoch - 4ms/step Epoch 265/300 283/283 - 1s - loss: 0.5768 - accuracy: 0.7949 - val_loss: 0.9026 - val_accuracy: 0.7630 - lr: 3.0223e-06 - 1s/epoch - 4ms/step Epoch 266/300 283/283 - 1s - loss: 0.5806 - accuracy: 0.7976 - val_loss: 0.9029 - val_accuracy: 0.7617 - lr: 3.0223e-06 - 1s/epoch - 4ms/step Epoch 267/300 283/283 - 1s - loss: 0.5797 - accuracy: 0.7972 - val_loss: 0.9001 - val_accuracy: 0.7617 - lr: 3.0223e-06 - 1s/epoch - 4ms/step Epoch 268/300 283/283 - 1s - loss: 0.5681 - accuracy: 0.8018 - val_loss: 0.9006 - val_accuracy: 0.7623 - lr: 3.0223e-06 - 1s/epoch - 4ms/step Epoch 269/300 283/283 - 1s - loss: 0.5902 - accuracy: 0.7908 - val_loss: 0.9027 - val_accuracy: 0.7620 - lr: 3.0223e-06 - 1s/epoch - 4ms/step Epoch 270/300 283/283 - 1s - loss: 0.5809 - accuracy: 0.7922 - val_loss: 0.8954 - val_accuracy: 0.7630 - lr: 3.0223e-06 - 1s/epoch - 4ms/step Epoch 271/300 283/283 - 1s - loss: 0.5803 - accuracy: 0.7990 - val_loss: 0.9003 - val_accuracy: 0.7623 - lr: 2.4179e-06 - 1s/epoch - 4ms/step Epoch 272/300 283/283 - 1s - loss: 0.5753 - accuracy: 0.7945 - val_loss: 0.9007 - val_accuracy: 0.7620 - lr: 2.4179e-06 - 1s/epoch - 4ms/step Epoch 273/300 283/283 - 1s - loss: 0.5783 - accuracy: 0.7956 - val_loss: 0.9005 - val_accuracy: 0.7620 - lr: 2.4179e-06 - 1s/epoch - 4ms/step Epoch 274/300 283/283 - 1s - loss: 0.5850 - accuracy: 0.7933 - val_loss: 0.9019 - val_accuracy: 0.7623 - lr: 2.4179e-06 - 1s/epoch - 4ms/step Epoch 275/300 283/283 - 1s - loss: 0.5826 - accuracy: 0.7975 - val_loss: 0.9043 - val_accuracy: 0.7630 - lr: 2.4179e-06 - 1s/epoch - 4ms/step Epoch 276/300 283/283 - 1s - loss: 0.5735 - accuracy: 0.7961 - val_loss: 0.9022 - val_accuracy: 0.7620 - lr: 2.4179e-06 - 1s/epoch - 4ms/step Epoch 277/300 
283/283 - 1s - loss: 0.5933 - accuracy: 0.7901 - val_loss: 0.8997 - val_accuracy: 0.7647 - lr: 2.4179e-06 - 1s/epoch - 5ms/step Epoch 278/300 283/283 - 1s - loss: 0.5809 - accuracy: 0.7967 - val_loss: 0.9004 - val_accuracy: 0.7633 - lr: 2.4179e-06 - 1s/epoch - 4ms/step Epoch 279/300 283/283 - 1s - loss: 0.5819 - accuracy: 0.7963 - val_loss: 0.9042 - val_accuracy: 0.7630 - lr: 2.4179e-06 - 1s/epoch - 4ms/step Epoch 280/300 283/283 - 1s - loss: 0.5782 - accuracy: 0.7946 - val_loss: 0.8999 - val_accuracy: 0.7627 - lr: 2.4179e-06 - 1s/epoch - 4ms/step Epoch 281/300 283/283 - 1s - loss: 0.5883 - accuracy: 0.7938 - val_loss: 0.9009 - val_accuracy: 0.7627 - lr: 1.9343e-06 - 1s/epoch - 4ms/step Epoch 282/300 283/283 - 1s - loss: 0.5756 - accuracy: 0.7959 - val_loss: 0.9011 - val_accuracy: 0.7627 - lr: 1.9343e-06 - 1s/epoch - 4ms/step Epoch 283/300 283/283 - 1s - loss: 0.5729 - accuracy: 0.7991 - val_loss: 0.9026 - val_accuracy: 0.7630 - lr: 1.9343e-06 - 1s/epoch - 4ms/step Epoch 284/300 283/283 - 1s - loss: 0.5794 - accuracy: 0.7964 - val_loss: 0.9013 - val_accuracy: 0.7630 - lr: 1.9343e-06 - 1s/epoch - 4ms/step Epoch 285/300 283/283 - 1s - loss: 0.5823 - accuracy: 0.7964 - val_loss: 0.9006 - val_accuracy: 0.7630 - lr: 1.9343e-06 - 1s/epoch - 4ms/step Epoch 286/300 283/283 - 1s - loss: 0.5904 - accuracy: 0.7901 - val_loss: 0.8995 - val_accuracy: 0.7640 - lr: 1.9343e-06 - 1s/epoch - 4ms/step Epoch 287/300 283/283 - 1s - loss: 0.5831 - accuracy: 0.7918 - val_loss: 0.9002 - val_accuracy: 0.7633 - lr: 1.9343e-06 - 1s/epoch - 4ms/step Epoch 288/300 283/283 - 1s - loss: 0.5773 - accuracy: 0.7922 - val_loss: 0.9032 - val_accuracy: 0.7627 - lr: 1.9343e-06 - 1s/epoch - 4ms/step Epoch 289/300 283/283 - 1s - loss: 0.5800 - accuracy: 0.7954 - val_loss: 0.9027 - val_accuracy: 0.7613 - lr: 1.9343e-06 - 1s/epoch - 4ms/step Epoch 290/300 283/283 - 1s - loss: 0.5726 - accuracy: 0.7966 - val_loss: 0.9026 - val_accuracy: 0.7640 - lr: 1.9343e-06 - 1s/epoch - 4ms/step Epoch 291/300 283/283 - 1s - loss: 0.5712 - accuracy: 0.7961 - val_loss: 0.9026 - val_accuracy: 0.7633 - lr: 1.5474e-06 - 1s/epoch - 4ms/step Epoch 292/300 283/283 - 1s - loss: 0.5703 - accuracy: 0.8015 - val_loss: 0.9031 - val_accuracy: 0.7623 - lr: 1.5474e-06 - 1s/epoch - 4ms/step Epoch 293/300 283/283 - 1s - loss: 0.5720 - accuracy: 0.7962 - val_loss: 0.9032 - val_accuracy: 0.7623 - lr: 1.5474e-06 - 1s/epoch - 4ms/step Epoch 294/300 283/283 - 1s - loss: 0.5844 - accuracy: 0.7913 - val_loss: 0.9002 - val_accuracy: 0.7627 - lr: 1.5474e-06 - 1s/epoch - 4ms/step Epoch 295/300 283/283 - 1s - loss: 0.5824 - accuracy: 0.7970 - val_loss: 0.9037 - val_accuracy: 0.7613 - lr: 1.5474e-06 - 1s/epoch - 4ms/step Epoch 296/300 283/283 - 1s - loss: 0.5799 - accuracy: 0.7955 - val_loss: 0.9008 - val_accuracy: 0.7623 - lr: 1.5474e-06 - 1s/epoch - 4ms/step Epoch 297/300 283/283 - 1s - loss: 0.5924 - accuracy: 0.7935 - val_loss: 0.9026 - val_accuracy: 0.7623 - lr: 1.5474e-06 - 1s/epoch - 4ms/step Epoch 298/300 283/283 - 1s - loss: 0.5879 - accuracy: 0.7928 - val_loss: 0.9008 - val_accuracy: 0.7620 - lr: 1.5474e-06 - 1s/epoch - 4ms/step Epoch 299/300 283/283 - 1s - loss: 0.5811 - accuracy: 0.7971 - val_loss: 0.8991 - val_accuracy: 0.7617 - lr: 1.5474e-06 - 1s/epoch - 4ms/step Epoch 300/300 283/283 - 1s - loss: 0.5730 - accuracy: 0.7976 - val_loss: 0.9021 - val_accuracy: 0.7633 - lr: 1.5474e-06 - 1s/epoch - 4ms/step 94/94 - 0s - loss: 0.9021 - accuracy: 0.7633 - 197ms/epoch - 2ms/step Baseline Error: 23.67%
We can see that the learning rate scheduler has not improved the model's performance: the validation accuracy still flattens out after roughly 100 epochs, and the overall accuracy has actually decreased slightly. Instead of using the learning rate scheduler, we can add more dropout and regularization layers to RegularizedDOModel2 (the 300-epoch model) and see whether that overcomes the overfitting problem and pushes validation accuracy above 90%.
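For reference, the learning rates recorded in the log above (1e-3 scaled down by a factor of 0.8 every 10 epochs: 3.2768e-04, 2.6214e-04, 2.0972e-04, ...) correspond to a simple step-decay schedule. The callback actually used for that run is defined earlier in the notebook, so the lines below are only a sketch of that style of LearningRateScheduler, shown for context before moving on to the dropout experiment.
from keras.callbacks import LearningRateScheduler

# Sketch only: a step-decay schedule consistent with the logged learning rates
# (the run above was driven by a callback defined earlier, not necessarily this one).
def step_decay(epoch, lr):
    # shrink the learning rate by a factor of 0.8 every 10 epochs
    if epoch > 0 and epoch % 10 == 0:
        return lr * 0.8
    return lr

lr_schedule = LearningRateScheduler(step_decay)
# would be passed to fit via callbacks=[lr_schedule]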
# Increase dropout: add a Dropout layer after every pooling block and keep
# L2 regularization on the dense layer to fight overfitting.
model_31new = models.Sequential([
    layers.Conv2D(32, (3, 3), activation='relu', input_shape=(31, 31, 1)),
    layers.MaxPooling2D((2, 2)),
    layers.Dropout(0.2),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.MaxPooling2D((2, 2)),
    layers.Dropout(0.2),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.MaxPooling2D((2, 2)),
    layers.Dropout(0.2),
    layers.Flatten(),
    layers.Dense(64, activation='relu', kernel_regularizer=regularizers.l2(0.001)),
    layers.Dropout(0.5),
    layers.Dense(15, activation='softmax')
])
model_31new.compile(optimizer='adam',
                    loss='categorical_crossentropy',
                    metrics=['accuracy'])
history = model_31new.fit(X_train31, train_labels, validation_data=(X_val31, validation_labels),
                          epochs=350, batch_size=200, verbose=2, class_weight=class_weight)  # note: this run uses 350 epochs
scores = model_31new.evaluate(X_val31, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100 - scores[1] * 100))
plot_history(history)
df31.loc[len(df31)] = ['ExtraRegularReducedModel', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
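The class_weight dictionary passed to fit above is computed earlier in the notebook and is not shown in this section. A minimal sketch of how such weights could be derived is given below, assuming a hypothetical y_train_int holding the integer class labels (0-14) before one-hot encoding and using sklearn's compute_class_weight; the original derivation may differ.
from sklearn.utils.class_weight import compute_class_weight

# Hypothetical derivation of balanced per-class weights from integer labels.
weights_arr = compute_class_weight(class_weight='balanced',
                                   classes=np.arange(15),
                                   y=y_train_int)
class_weight = dict(enumerate(weights_arr))  # {class index: weight}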
Epoch 1/350
2023-11-26 13:22:32.809832: E tensorflow/core/grappler/optimizers/meta_optimizer.cc:954] layout failed: INVALID_ARGUMENT: Size of values 0 does not match size of permutation 4 @ fanin shape insequential_10/dropout_3/dropout/SelectV2-2-TransposeNHWCToNCHW-LayoutOptimizer
46/46 - 3s - loss: 3.2468 - accuracy: 0.1008 - val_loss: 2.8435 - val_accuracy: 0.0667 - 3s/epoch - 55ms/step Epoch 2/350 46/46 - 0s - loss: 3.1411 - accuracy: 0.1131 - val_loss: 2.8132 - val_accuracy: 0.0890 - 247ms/epoch - 5ms/step Epoch 3/350 46/46 - 0s - loss: 2.9888 - accuracy: 0.1508 - val_loss: 2.7089 - val_accuracy: 0.1040 - 243ms/epoch - 5ms/step Epoch 4/350 46/46 - 0s - loss: 2.7869 - accuracy: 0.1918 - val_loss: 2.4833 - val_accuracy: 0.1900 - 240ms/epoch - 5ms/step Epoch 5/350 46/46 - 0s - loss: 2.6165 - accuracy: 0.2307 - val_loss: 2.3508 - val_accuracy: 0.2273 - 245ms/epoch - 5ms/step Epoch 6/350 46/46 - 0s - loss: 2.4703 - accuracy: 0.2661 - val_loss: 2.2466 - val_accuracy: 0.2527 - 244ms/epoch - 5ms/step Epoch 7/350 46/46 - 0s - loss: 2.3777 - accuracy: 0.2869 - val_loss: 2.2395 - val_accuracy: 0.2827 - 244ms/epoch - 5ms/step Epoch 8/350 46/46 - 0s - loss: 2.2827 - accuracy: 0.3101 - val_loss: 2.0958 - val_accuracy: 0.3100 - 240ms/epoch - 5ms/step Epoch 9/350 46/46 - 0s - loss: 2.2064 - accuracy: 0.3290 - val_loss: 1.9944 - val_accuracy: 0.3603 - 252ms/epoch - 5ms/step Epoch 10/350 46/46 - 0s - loss: 2.1284 - accuracy: 0.3520 - val_loss: 2.1160 - val_accuracy: 0.3227 - 242ms/epoch - 5ms/step Epoch 11/350 46/46 - 0s - loss: 2.1018 - accuracy: 0.3547 - val_loss: 1.9260 - val_accuracy: 0.3733 - 240ms/epoch - 5ms/step Epoch 12/350 46/46 - 0s - loss: 1.9981 - accuracy: 0.3786 - val_loss: 1.7960 - val_accuracy: 0.4197 - 242ms/epoch - 5ms/step Epoch 13/350 46/46 - 0s - loss: 1.9659 - accuracy: 0.3949 - val_loss: 1.7705 - val_accuracy: 0.4243 - 245ms/epoch - 5ms/step Epoch 14/350 46/46 - 0s - loss: 1.9096 - accuracy: 0.4164 - val_loss: 1.7205 - val_accuracy: 0.4417 - 244ms/epoch - 5ms/step Epoch 15/350 46/46 - 0s - loss: 1.8337 - accuracy: 0.4337 - val_loss: 1.7015 - val_accuracy: 0.4413 - 243ms/epoch - 5ms/step Epoch 16/350 46/46 - 0s - loss: 1.8078 - accuracy: 0.4502 - val_loss: 1.7536 - val_accuracy: 0.4377 - 241ms/epoch - 5ms/step Epoch 17/350 46/46 - 0s - loss: 1.8060 - accuracy: 0.4478 - val_loss: 1.5842 - val_accuracy: 0.4797 - 248ms/epoch - 5ms/step Epoch 18/350 46/46 - 0s - loss: 1.7473 - accuracy: 0.4576 - val_loss: 1.6520 - val_accuracy: 0.4537 - 243ms/epoch - 5ms/step Epoch 19/350 46/46 - 0s - loss: 1.7102 - accuracy: 0.4696 - val_loss: 1.7044 - val_accuracy: 0.4433 - 254ms/epoch - 6ms/step Epoch 20/350 46/46 - 0s - loss: 1.7259 - accuracy: 0.4745 - val_loss: 1.5072 - val_accuracy: 0.5077 - 243ms/epoch - 5ms/step Epoch 21/350 46/46 - 0s - loss: 1.6548 - accuracy: 0.4868 - val_loss: 1.4666 - val_accuracy: 0.5077 - 250ms/epoch - 5ms/step Epoch 22/350 46/46 - 0s - loss: 1.6362 - accuracy: 0.4971 - val_loss: 1.4606 - val_accuracy: 0.5290 - 247ms/epoch - 5ms/step Epoch 23/350 46/46 - 0s - loss: 1.6036 - accuracy: 0.5102 - val_loss: 1.4073 - val_accuracy: 0.5483 - 244ms/epoch - 5ms/step Epoch 24/350 46/46 - 0s - loss: 1.5639 - accuracy: 0.5163 - val_loss: 1.4303 - val_accuracy: 0.5487 - 242ms/epoch - 5ms/step Epoch 25/350 46/46 - 0s - loss: 1.5452 - accuracy: 0.5270 - val_loss: 1.4346 - val_accuracy: 0.5543 - 250ms/epoch - 5ms/step Epoch 26/350 46/46 - 0s - loss: 1.5323 - accuracy: 0.5235 - val_loss: 1.3705 - val_accuracy: 0.5610 - 244ms/epoch - 5ms/step Epoch 27/350 46/46 - 0s - loss: 1.5323 - accuracy: 0.5255 - val_loss: 1.3357 - val_accuracy: 0.5830 - 240ms/epoch - 5ms/step Epoch 28/350 46/46 - 0s - loss: 1.4803 - accuracy: 0.5440 - val_loss: 1.4471 - val_accuracy: 0.5243 - 242ms/epoch - 5ms/step Epoch 29/350 46/46 - 0s - loss: 1.4817 - accuracy: 0.5470 - val_loss: 1.3039 
- val_accuracy: 0.5863 - 244ms/epoch - 5ms/step Epoch 30/350 46/46 - 0s - loss: 1.4616 - accuracy: 0.5524 - val_loss: 1.2480 - val_accuracy: 0.6200 - 242ms/epoch - 5ms/step Epoch 31/350 46/46 - 0s - loss: 1.4450 - accuracy: 0.5529 - val_loss: 1.2977 - val_accuracy: 0.5923 - 241ms/epoch - 5ms/step Epoch 32/350 46/46 - 0s - loss: 1.4147 - accuracy: 0.5598 - val_loss: 1.2770 - val_accuracy: 0.5957 - 240ms/epoch - 5ms/step Epoch 33/350 46/46 - 0s - loss: 1.3882 - accuracy: 0.5702 - val_loss: 1.2337 - val_accuracy: 0.6157 - 244ms/epoch - 5ms/step Epoch 34/350 46/46 - 0s - loss: 1.3790 - accuracy: 0.5741 - val_loss: 1.1660 - val_accuracy: 0.6413 - 242ms/epoch - 5ms/step Epoch 35/350 46/46 - 0s - loss: 1.3643 - accuracy: 0.5774 - val_loss: 1.1805 - val_accuracy: 0.6360 - 241ms/epoch - 5ms/step Epoch 36/350 46/46 - 0s - loss: 1.3580 - accuracy: 0.5851 - val_loss: 1.2278 - val_accuracy: 0.6303 - 240ms/epoch - 5ms/step Epoch 37/350 46/46 - 0s - loss: 1.3631 - accuracy: 0.5852 - val_loss: 1.2596 - val_accuracy: 0.6023 - 242ms/epoch - 5ms/step Epoch 38/350 46/46 - 0s - loss: 1.3502 - accuracy: 0.5853 - val_loss: 1.1525 - val_accuracy: 0.6503 - 247ms/epoch - 5ms/step Epoch 39/350 46/46 - 0s - loss: 1.3249 - accuracy: 0.5897 - val_loss: 1.1284 - val_accuracy: 0.6477 - 240ms/epoch - 5ms/step Epoch 40/350 46/46 - 0s - loss: 1.2970 - accuracy: 0.5929 - val_loss: 1.2462 - val_accuracy: 0.6093 - 240ms/epoch - 5ms/step Epoch 41/350 46/46 - 0s - loss: 1.2930 - accuracy: 0.5998 - val_loss: 1.0883 - val_accuracy: 0.6717 - 246ms/epoch - 5ms/step Epoch 42/350 46/46 - 0s - loss: 1.2526 - accuracy: 0.6185 - val_loss: 1.0909 - val_accuracy: 0.6720 - 245ms/epoch - 5ms/step Epoch 43/350 46/46 - 0s - loss: 1.2776 - accuracy: 0.6134 - val_loss: 1.0798 - val_accuracy: 0.6693 - 242ms/epoch - 5ms/step Epoch 44/350 46/46 - 0s - loss: 1.2413 - accuracy: 0.6220 - val_loss: 1.0314 - val_accuracy: 0.6923 - 242ms/epoch - 5ms/step Epoch 45/350 46/46 - 0s - loss: 1.2589 - accuracy: 0.6120 - val_loss: 1.0632 - val_accuracy: 0.6790 - 245ms/epoch - 5ms/step Epoch 46/350 46/46 - 0s - loss: 1.2244 - accuracy: 0.6235 - val_loss: 1.0246 - val_accuracy: 0.6950 - 242ms/epoch - 5ms/step Epoch 47/350 46/46 - 0s - loss: 1.2171 - accuracy: 0.6239 - val_loss: 1.0467 - val_accuracy: 0.6767 - 240ms/epoch - 5ms/step Epoch 48/350 46/46 - 0s - loss: 1.2088 - accuracy: 0.6279 - val_loss: 1.0200 - val_accuracy: 0.6987 - 242ms/epoch - 5ms/step Epoch 49/350 46/46 - 0s - loss: 1.2179 - accuracy: 0.6210 - val_loss: 1.0128 - val_accuracy: 0.6997 - 250ms/epoch - 5ms/step Epoch 50/350 46/46 - 0s - loss: 1.1944 - accuracy: 0.6286 - val_loss: 1.0022 - val_accuracy: 0.7070 - 255ms/epoch - 6ms/step Epoch 51/350 46/46 - 0s - loss: 1.1802 - accuracy: 0.6378 - val_loss: 1.0823 - val_accuracy: 0.6697 - 257ms/epoch - 6ms/step Epoch 52/350 46/46 - 0s - loss: 1.1843 - accuracy: 0.6390 - val_loss: 1.0104 - val_accuracy: 0.6963 - 254ms/epoch - 6ms/step Epoch 53/350 46/46 - 0s - loss: 1.1677 - accuracy: 0.6477 - val_loss: 0.9967 - val_accuracy: 0.6947 - 246ms/epoch - 5ms/step Epoch 54/350 46/46 - 0s - loss: 1.1670 - accuracy: 0.6423 - val_loss: 0.9654 - val_accuracy: 0.7090 - 246ms/epoch - 5ms/step Epoch 55/350 46/46 - 0s - loss: 1.1384 - accuracy: 0.6495 - val_loss: 0.9239 - val_accuracy: 0.7330 - 242ms/epoch - 5ms/step Epoch 56/350 46/46 - 0s - loss: 1.1349 - accuracy: 0.6486 - val_loss: 0.9304 - val_accuracy: 0.7227 - 243ms/epoch - 5ms/step Epoch 57/350 46/46 - 0s - loss: 1.1033 - accuracy: 0.6639 - val_loss: 0.9530 - val_accuracy: 0.7187 - 242ms/epoch - 5ms/step Epoch 
58/350 46/46 - 0s - loss: 1.1264 - accuracy: 0.6594 - val_loss: 0.9445 - val_accuracy: 0.7253 - 246ms/epoch - 5ms/step Epoch 59/350 46/46 - 0s - loss: 1.1297 - accuracy: 0.6560 - val_loss: 0.9802 - val_accuracy: 0.7157 - 242ms/epoch - 5ms/step Epoch 60/350 46/46 - 0s - loss: 1.1413 - accuracy: 0.6472 - val_loss: 0.9564 - val_accuracy: 0.7200 - 241ms/epoch - 5ms/step Epoch 61/350 46/46 - 0s - loss: 1.0997 - accuracy: 0.6612 - val_loss: 0.8935 - val_accuracy: 0.7397 - 248ms/epoch - 5ms/step Epoch 62/350 46/46 - 0s - loss: 1.0727 - accuracy: 0.6722 - val_loss: 0.8909 - val_accuracy: 0.7387 - 246ms/epoch - 5ms/step Epoch 63/350 46/46 - 0s - loss: 1.0796 - accuracy: 0.6659 - val_loss: 0.8840 - val_accuracy: 0.7433 - 244ms/epoch - 5ms/step Epoch 64/350 46/46 - 0s - loss: 1.0903 - accuracy: 0.6700 - val_loss: 1.2769 - val_accuracy: 0.5977 - 244ms/epoch - 5ms/step Epoch 65/350 46/46 - 0s - loss: 1.1252 - accuracy: 0.6534 - val_loss: 0.8908 - val_accuracy: 0.7360 - 244ms/epoch - 5ms/step Epoch 66/350 46/46 - 0s - loss: 1.0650 - accuracy: 0.6743 - val_loss: 0.8781 - val_accuracy: 0.7423 - 256ms/epoch - 6ms/step Epoch 67/350 46/46 - 0s - loss: 1.0783 - accuracy: 0.6694 - val_loss: 0.8542 - val_accuracy: 0.7553 - 254ms/epoch - 6ms/step Epoch 68/350 46/46 - 0s - loss: 1.0270 - accuracy: 0.6860 - val_loss: 0.8860 - val_accuracy: 0.7340 - 256ms/epoch - 6ms/step Epoch 69/350 46/46 - 0s - loss: 1.0432 - accuracy: 0.6870 - val_loss: 0.8968 - val_accuracy: 0.7367 - 253ms/epoch - 5ms/step Epoch 70/350 46/46 - 0s - loss: 1.0531 - accuracy: 0.6810 - val_loss: 0.8475 - val_accuracy: 0.7580 - 245ms/epoch - 5ms/step Epoch 71/350 46/46 - 0s - loss: 1.0094 - accuracy: 0.6901 - val_loss: 0.8410 - val_accuracy: 0.7603 - 246ms/epoch - 5ms/step Epoch 72/350 46/46 - 0s - loss: 1.0229 - accuracy: 0.6938 - val_loss: 0.8442 - val_accuracy: 0.7523 - 243ms/epoch - 5ms/step Epoch 73/350 46/46 - 0s - loss: 0.9935 - accuracy: 0.6938 - val_loss: 0.8663 - val_accuracy: 0.7450 - 245ms/epoch - 5ms/step Epoch 74/350 46/46 - 0s - loss: 1.0148 - accuracy: 0.6932 - val_loss: 0.8460 - val_accuracy: 0.7557 - 245ms/epoch - 5ms/step Epoch 75/350 46/46 - 0s - loss: 0.9912 - accuracy: 0.6992 - val_loss: 0.8248 - val_accuracy: 0.7647 - 242ms/epoch - 5ms/step Epoch 76/350 46/46 - 0s - loss: 0.9855 - accuracy: 0.6930 - val_loss: 0.8420 - val_accuracy: 0.7543 - 239ms/epoch - 5ms/step Epoch 77/350 46/46 - 0s - loss: 1.0055 - accuracy: 0.6977 - val_loss: 0.8307 - val_accuracy: 0.7557 - 242ms/epoch - 5ms/step Epoch 78/350 46/46 - 0s - loss: 0.9725 - accuracy: 0.7047 - val_loss: 0.8236 - val_accuracy: 0.7597 - 246ms/epoch - 5ms/step Epoch 79/350 46/46 - 0s - loss: 0.9769 - accuracy: 0.7018 - val_loss: 0.8398 - val_accuracy: 0.7490 - 243ms/epoch - 5ms/step Epoch 80/350 46/46 - 0s - loss: 0.9723 - accuracy: 0.7072 - val_loss: 0.7925 - val_accuracy: 0.7697 - 240ms/epoch - 5ms/step Epoch 81/350 46/46 - 0s - loss: 0.9680 - accuracy: 0.7005 - val_loss: 0.8133 - val_accuracy: 0.7620 - 247ms/epoch - 5ms/step Epoch 82/350 46/46 - 0s - loss: 0.9512 - accuracy: 0.7074 - val_loss: 0.7694 - val_accuracy: 0.7763 - 243ms/epoch - 5ms/step Epoch 83/350 46/46 - 0s - loss: 0.9481 - accuracy: 0.7051 - val_loss: 0.8036 - val_accuracy: 0.7607 - 245ms/epoch - 5ms/step Epoch 84/350 46/46 - 0s - loss: 0.9287 - accuracy: 0.7142 - val_loss: 0.8937 - val_accuracy: 0.7297 - 242ms/epoch - 5ms/step Epoch 85/350 46/46 - 0s - loss: 0.9708 - accuracy: 0.7016 - val_loss: 0.7947 - val_accuracy: 0.7670 - 241ms/epoch - 5ms/step Epoch 86/350 46/46 - 0s - loss: 0.9316 - accuracy: 0.7147 - 
val_loss: 0.7725 - val_accuracy: 0.7777 - 241ms/epoch - 5ms/step Epoch 87/350 46/46 - 0s - loss: 0.9433 - accuracy: 0.7188 - val_loss: 0.7656 - val_accuracy: 0.7777 - 246ms/epoch - 5ms/step Epoch 88/350 46/46 - 0s - loss: 0.9214 - accuracy: 0.7229 - val_loss: 0.7519 - val_accuracy: 0.7830 - 246ms/epoch - 5ms/step Epoch 89/350 46/46 - 0s - loss: 0.9417 - accuracy: 0.7141 - val_loss: 0.7866 - val_accuracy: 0.7680 - 248ms/epoch - 5ms/step Epoch 90/350 46/46 - 0s - loss: 0.9354 - accuracy: 0.7107 - val_loss: 0.7624 - val_accuracy: 0.7777 - 249ms/epoch - 5ms/step Epoch 91/350 46/46 - 0s - loss: 0.8961 - accuracy: 0.7302 - val_loss: 0.7378 - val_accuracy: 0.7887 - 244ms/epoch - 5ms/step Epoch 92/350 46/46 - 0s - loss: 0.9062 - accuracy: 0.7232 - val_loss: 0.7733 - val_accuracy: 0.7743 - 242ms/epoch - 5ms/step Epoch 93/350 46/46 - 0s - loss: 0.8958 - accuracy: 0.7257 - val_loss: 0.7510 - val_accuracy: 0.7820 - 242ms/epoch - 5ms/step Epoch 94/350 46/46 - 0s - loss: 0.9746 - accuracy: 0.7057 - val_loss: 0.7505 - val_accuracy: 0.7927 - 245ms/epoch - 5ms/step Epoch 95/350 46/46 - 0s - loss: 0.8852 - accuracy: 0.7244 - val_loss: 0.7287 - val_accuracy: 0.7897 - 246ms/epoch - 5ms/step Epoch 96/350 46/46 - 0s - loss: 0.8861 - accuracy: 0.7252 - val_loss: 0.7203 - val_accuracy: 0.7940 - 242ms/epoch - 5ms/step Epoch 97/350 46/46 - 0s - loss: 0.8844 - accuracy: 0.7265 - val_loss: 0.7215 - val_accuracy: 0.7940 - 248ms/epoch - 5ms/step Epoch 98/350 46/46 - 0s - loss: 0.8706 - accuracy: 0.7304 - val_loss: 0.7400 - val_accuracy: 0.7887 - 245ms/epoch - 5ms/step Epoch 99/350 46/46 - 0s - loss: 0.8862 - accuracy: 0.7286 - val_loss: 0.7475 - val_accuracy: 0.7893 - 243ms/epoch - 5ms/step Epoch 100/350 46/46 - 0s - loss: 0.8743 - accuracy: 0.7346 - val_loss: 0.7994 - val_accuracy: 0.7660 - 242ms/epoch - 5ms/step Epoch 101/350 46/46 - 0s - loss: 0.8684 - accuracy: 0.7322 - val_loss: 0.7235 - val_accuracy: 0.7967 - 245ms/epoch - 5ms/step Epoch 102/350 46/46 - 0s - loss: 0.8674 - accuracy: 0.7368 - val_loss: 0.6968 - val_accuracy: 0.8063 - 243ms/epoch - 5ms/step Epoch 103/350 46/46 - 0s - loss: 0.8720 - accuracy: 0.7321 - val_loss: 0.7538 - val_accuracy: 0.7893 - 239ms/epoch - 5ms/step Epoch 104/350 46/46 - 0s - loss: 0.8629 - accuracy: 0.7376 - val_loss: 0.7065 - val_accuracy: 0.8043 - 239ms/epoch - 5ms/step Epoch 105/350 46/46 - 0s - loss: 0.8457 - accuracy: 0.7420 - val_loss: 0.7242 - val_accuracy: 0.7947 - 241ms/epoch - 5ms/step Epoch 106/350 46/46 - 0s - loss: 0.8803 - accuracy: 0.7333 - val_loss: 0.7148 - val_accuracy: 0.7973 - 242ms/epoch - 5ms/step Epoch 107/350 46/46 - 0s - loss: 0.8432 - accuracy: 0.7462 - val_loss: 0.7006 - val_accuracy: 0.7987 - 242ms/epoch - 5ms/step Epoch 108/350 46/46 - 0s - loss: 0.8544 - accuracy: 0.7409 - val_loss: 0.6917 - val_accuracy: 0.7950 - 240ms/epoch - 5ms/step Epoch 109/350 46/46 - 0s - loss: 0.8326 - accuracy: 0.7418 - val_loss: 0.6869 - val_accuracy: 0.8037 - 243ms/epoch - 5ms/step Epoch 110/350 46/46 - 0s - loss: 0.8433 - accuracy: 0.7424 - val_loss: 0.6745 - val_accuracy: 0.8083 - 253ms/epoch - 5ms/step Epoch 111/350 46/46 - 0s - loss: 0.8312 - accuracy: 0.7478 - val_loss: 0.6887 - val_accuracy: 0.8010 - 258ms/epoch - 6ms/step Epoch 112/350 46/46 - 0s - loss: 0.8152 - accuracy: 0.7461 - val_loss: 0.6704 - val_accuracy: 0.8087 - 261ms/epoch - 6ms/step Epoch 113/350 46/46 - 0s - loss: 0.8369 - accuracy: 0.7446 - val_loss: 0.7940 - val_accuracy: 0.7637 - 254ms/epoch - 6ms/step Epoch 114/350 46/46 - 0s - loss: 0.8376 - accuracy: 0.7416 - val_loss: 0.6956 - val_accuracy: 0.8023 
- 259ms/epoch - 6ms/step Epoch 115/350 46/46 - 0s - loss: 0.8307 - accuracy: 0.7477 - val_loss: 0.6604 - val_accuracy: 0.8140 - 268ms/epoch - 6ms/step Epoch 116/350 46/46 - 0s - loss: 0.8360 - accuracy: 0.7449 - val_loss: 0.6858 - val_accuracy: 0.8023 - 258ms/epoch - 6ms/step Epoch 117/350 46/46 - 0s - loss: 0.7810 - accuracy: 0.7622 - val_loss: 0.6705 - val_accuracy: 0.8083 - 264ms/epoch - 6ms/step Epoch 118/350 46/46 - 0s - loss: 0.7949 - accuracy: 0.7574 - val_loss: 0.6625 - val_accuracy: 0.8150 - 268ms/epoch - 6ms/step Epoch 119/350 46/46 - 0s - loss: 0.8130 - accuracy: 0.7489 - val_loss: 0.7155 - val_accuracy: 0.7917 - 264ms/epoch - 6ms/step Epoch 120/350 46/46 - 0s - loss: 0.8081 - accuracy: 0.7573 - val_loss: 0.6485 - val_accuracy: 0.8170 - 265ms/epoch - 6ms/step Epoch 121/350 46/46 - 0s - loss: 0.7843 - accuracy: 0.7563 - val_loss: 0.6609 - val_accuracy: 0.8077 - 268ms/epoch - 6ms/step Epoch 122/350 46/46 - 0s - loss: 0.7977 - accuracy: 0.7583 - val_loss: 0.6477 - val_accuracy: 0.8123 - 254ms/epoch - 6ms/step Epoch 123/350 46/46 - 0s - loss: 0.7771 - accuracy: 0.7616 - val_loss: 0.6632 - val_accuracy: 0.8097 - 255ms/epoch - 6ms/step Epoch 124/350 46/46 - 0s - loss: 0.7840 - accuracy: 0.7550 - val_loss: 0.6514 - val_accuracy: 0.8163 - 253ms/epoch - 6ms/step Epoch 125/350 46/46 - 0s - loss: 0.7875 - accuracy: 0.7624 - val_loss: 0.7019 - val_accuracy: 0.7953 - 244ms/epoch - 5ms/step Epoch 126/350 46/46 - 0s - loss: 0.8022 - accuracy: 0.7565 - val_loss: 0.6391 - val_accuracy: 0.8220 - 241ms/epoch - 5ms/step Epoch 127/350 46/46 - 0s - loss: 0.7669 - accuracy: 0.7604 - val_loss: 0.6367 - val_accuracy: 0.8197 - 244ms/epoch - 5ms/step Epoch 128/350 46/46 - 0s - loss: 0.7712 - accuracy: 0.7641 - val_loss: 0.7038 - val_accuracy: 0.8023 - 240ms/epoch - 5ms/step Epoch 129/350 46/46 - 0s - loss: 0.8360 - accuracy: 0.7480 - val_loss: 0.6518 - val_accuracy: 0.8113 - 249ms/epoch - 5ms/step Epoch 130/350 46/46 - 0s - loss: 0.7651 - accuracy: 0.7626 - val_loss: 0.6200 - val_accuracy: 0.8263 - 243ms/epoch - 5ms/step Epoch 131/350 46/46 - 0s - loss: 0.7525 - accuracy: 0.7724 - val_loss: 0.6338 - val_accuracy: 0.8200 - 240ms/epoch - 5ms/step Epoch 132/350 46/46 - 0s - loss: 0.7413 - accuracy: 0.7718 - val_loss: 0.6494 - val_accuracy: 0.8167 - 241ms/epoch - 5ms/step Epoch 133/350 46/46 - 0s - loss: 0.7442 - accuracy: 0.7722 - val_loss: 0.6399 - val_accuracy: 0.8177 - 246ms/epoch - 5ms/step Epoch 134/350 46/46 - 0s - loss: 0.7510 - accuracy: 0.7718 - val_loss: 0.6158 - val_accuracy: 0.8250 - 242ms/epoch - 5ms/step Epoch 135/350 46/46 - 0s - loss: 0.7413 - accuracy: 0.7764 - val_loss: 0.6265 - val_accuracy: 0.8273 - 250ms/epoch - 5ms/step Epoch 136/350 46/46 - 0s - loss: 0.7419 - accuracy: 0.7774 - val_loss: 0.5995 - val_accuracy: 0.8347 - 245ms/epoch - 5ms/step Epoch 137/350 46/46 - 0s - loss: 0.7411 - accuracy: 0.7751 - val_loss: 0.6326 - val_accuracy: 0.8137 - 244ms/epoch - 5ms/step Epoch 138/350 46/46 - 0s - loss: 0.7502 - accuracy: 0.7682 - val_loss: 0.6208 - val_accuracy: 0.8267 - 241ms/epoch - 5ms/step Epoch 139/350 46/46 - 0s - loss: 0.7275 - accuracy: 0.7776 - val_loss: 0.6548 - val_accuracy: 0.8100 - 251ms/epoch - 5ms/step Epoch 140/350 46/46 - 0s - loss: 0.7375 - accuracy: 0.7722 - val_loss: 0.6116 - val_accuracy: 0.8290 - 245ms/epoch - 5ms/step Epoch 141/350 46/46 - 0s - loss: 0.7237 - accuracy: 0.7841 - val_loss: 0.6508 - val_accuracy: 0.8267 - 252ms/epoch - 5ms/step Epoch 142/350 46/46 - 0s - loss: 0.7240 - accuracy: 0.7820 - val_loss: 0.6455 - val_accuracy: 0.8247 - 243ms/epoch - 5ms/step 
Epoch 143/350 46/46 - 0s - loss: 0.7204 - accuracy: 0.7807 - val_loss: 0.6186 - val_accuracy: 0.8303 - 248ms/epoch - 5ms/step Epoch 144/350 46/46 - 0s - loss: 0.7015 - accuracy: 0.7856 - val_loss: 0.6167 - val_accuracy: 0.8227 - 244ms/epoch - 5ms/step Epoch 145/350 46/46 - 0s - loss: 0.7162 - accuracy: 0.7794 - val_loss: 0.5856 - val_accuracy: 0.8427 - 253ms/epoch - 6ms/step Epoch 146/350 46/46 - 0s - loss: 0.6814 - accuracy: 0.7955 - val_loss: 0.5876 - val_accuracy: 0.8333 - 244ms/epoch - 5ms/step Epoch 147/350 46/46 - 0s - loss: 0.6902 - accuracy: 0.7919 - val_loss: 0.5707 - val_accuracy: 0.8387 - 243ms/epoch - 5ms/step Epoch 148/350 46/46 - 0s - loss: 0.6950 - accuracy: 0.7878 - val_loss: 0.5874 - val_accuracy: 0.8347 - 238ms/epoch - 5ms/step Epoch 149/350 46/46 - 0s - loss: 0.7212 - accuracy: 0.7826 - val_loss: 0.6099 - val_accuracy: 0.8283 - 242ms/epoch - 5ms/step Epoch 150/350 46/46 - 0s - loss: 0.6843 - accuracy: 0.7877 - val_loss: 0.5873 - val_accuracy: 0.8410 - 245ms/epoch - 5ms/step Epoch 151/350 46/46 - 0s - loss: 0.6986 - accuracy: 0.7900 - val_loss: 0.5860 - val_accuracy: 0.8400 - 244ms/epoch - 5ms/step Epoch 152/350 46/46 - 0s - loss: 0.6752 - accuracy: 0.7926 - val_loss: 0.5573 - val_accuracy: 0.8520 - 240ms/epoch - 5ms/step Epoch 153/350 46/46 - 0s - loss: 0.7027 - accuracy: 0.7869 - val_loss: 0.6049 - val_accuracy: 0.8323 - 243ms/epoch - 5ms/step Epoch 154/350 46/46 - 0s - loss: 0.6819 - accuracy: 0.7874 - val_loss: 0.6242 - val_accuracy: 0.8303 - 240ms/epoch - 5ms/step Epoch 155/350 46/46 - 0s - loss: 0.6959 - accuracy: 0.7905 - val_loss: 0.6083 - val_accuracy: 0.8307 - 239ms/epoch - 5ms/step Epoch 156/350 46/46 - 0s - loss: 0.6994 - accuracy: 0.7882 - val_loss: 0.5803 - val_accuracy: 0.8390 - 243ms/epoch - 5ms/step Epoch 157/350 46/46 - 0s - loss: 0.7039 - accuracy: 0.7840 - val_loss: 0.5995 - val_accuracy: 0.8393 - 246ms/epoch - 5ms/step Epoch 158/350 46/46 - 0s - loss: 0.6793 - accuracy: 0.7920 - val_loss: 0.5796 - val_accuracy: 0.8470 - 244ms/epoch - 5ms/step Epoch 159/350 46/46 - 0s - loss: 0.6742 - accuracy: 0.7953 - val_loss: 0.5821 - val_accuracy: 0.8410 - 244ms/epoch - 5ms/step Epoch 160/350 46/46 - 0s - loss: 0.6630 - accuracy: 0.7967 - val_loss: 0.6033 - val_accuracy: 0.8347 - 237ms/epoch - 5ms/step Epoch 161/350 46/46 - 0s - loss: 0.6663 - accuracy: 0.7990 - val_loss: 0.5501 - val_accuracy: 0.8520 - 239ms/epoch - 5ms/step Epoch 162/350 46/46 - 0s - loss: 0.6539 - accuracy: 0.8013 - val_loss: 0.5571 - val_accuracy: 0.8493 - 241ms/epoch - 5ms/step Epoch 163/350 46/46 - 0s - loss: 0.6673 - accuracy: 0.7964 - val_loss: 0.5670 - val_accuracy: 0.8450 - 243ms/epoch - 5ms/step Epoch 164/350 46/46 - 0s - loss: 0.6865 - accuracy: 0.7918 - val_loss: 0.5945 - val_accuracy: 0.8390 - 241ms/epoch - 5ms/step Epoch 165/350 46/46 - 0s - loss: 0.6931 - accuracy: 0.7880 - val_loss: 0.6023 - val_accuracy: 0.8267 - 240ms/epoch - 5ms/step Epoch 166/350 46/46 - 0s - loss: 0.6628 - accuracy: 0.7940 - val_loss: 0.5559 - val_accuracy: 0.8473 - 239ms/epoch - 5ms/step Epoch 167/350 46/46 - 0s - loss: 0.6623 - accuracy: 0.8024 - val_loss: 0.5719 - val_accuracy: 0.8400 - 243ms/epoch - 5ms/step Epoch 168/350 46/46 - 0s - loss: 0.6568 - accuracy: 0.7966 - val_loss: 0.6010 - val_accuracy: 0.8417 - 240ms/epoch - 5ms/step Epoch 169/350 46/46 - 0s - loss: 0.6480 - accuracy: 0.8057 - val_loss: 0.5490 - val_accuracy: 0.8553 - 239ms/epoch - 5ms/step Epoch 170/350 46/46 - 0s - loss: 0.6461 - accuracy: 0.8060 - val_loss: 0.5375 - val_accuracy: 0.8517 - 246ms/epoch - 5ms/step Epoch 171/350 46/46 - 0s - 
loss: 0.6370 - accuracy: 0.8097 - val_loss: 0.5369 - val_accuracy: 0.8560 - 241ms/epoch - 5ms/step Epoch 172/350 46/46 - 0s - loss: 0.6366 - accuracy: 0.8077 - val_loss: 0.5513 - val_accuracy: 0.8503 - 249ms/epoch - 5ms/step Epoch 173/350 46/46 - 0s - loss: 0.6415 - accuracy: 0.8068 - val_loss: 0.5457 - val_accuracy: 0.8530 - 250ms/epoch - 5ms/step Epoch 174/350 46/46 - 0s - loss: 0.6297 - accuracy: 0.8124 - val_loss: 0.5441 - val_accuracy: 0.8517 - 250ms/epoch - 5ms/step Epoch 175/350 46/46 - 0s - loss: 0.6276 - accuracy: 0.8069 - val_loss: 0.5673 - val_accuracy: 0.8460 - 253ms/epoch - 6ms/step Epoch 176/350 46/46 - 0s - loss: 0.6507 - accuracy: 0.8028 - val_loss: 0.5491 - val_accuracy: 0.8557 - 244ms/epoch - 5ms/step Epoch 177/350 46/46 - 0s - loss: 0.6474 - accuracy: 0.8051 - val_loss: 0.5657 - val_accuracy: 0.8443 - 238ms/epoch - 5ms/step Epoch 178/350 46/46 - 0s - loss: 0.6364 - accuracy: 0.8062 - val_loss: 0.5819 - val_accuracy: 0.8387 - 238ms/epoch - 5ms/step Epoch 179/350 46/46 - 0s - loss: 0.6423 - accuracy: 0.8025 - val_loss: 0.5388 - val_accuracy: 0.8567 - 239ms/epoch - 5ms/step Epoch 180/350 46/46 - 0s - loss: 0.6271 - accuracy: 0.8115 - val_loss: 0.5529 - val_accuracy: 0.8520 - 238ms/epoch - 5ms/step Epoch 181/350 46/46 - 0s - loss: 0.6506 - accuracy: 0.7993 - val_loss: 0.5674 - val_accuracy: 0.8440 - 239ms/epoch - 5ms/step Epoch 182/350 46/46 - 0s - loss: 0.6317 - accuracy: 0.8059 - val_loss: 0.5439 - val_accuracy: 0.8537 - 242ms/epoch - 5ms/step Epoch 183/350 46/46 - 0s - loss: 0.6429 - accuracy: 0.8072 - val_loss: 0.5519 - val_accuracy: 0.8500 - 241ms/epoch - 5ms/step Epoch 184/350 46/46 - 0s - loss: 0.6037 - accuracy: 0.8176 - val_loss: 0.5492 - val_accuracy: 0.8557 - 239ms/epoch - 5ms/step Epoch 185/350 46/46 - 0s - loss: 0.6138 - accuracy: 0.8167 - val_loss: 0.5682 - val_accuracy: 0.8467 - 238ms/epoch - 5ms/step Epoch 186/350 46/46 - 0s - loss: 0.6311 - accuracy: 0.8042 - val_loss: 0.5277 - val_accuracy: 0.8603 - 254ms/epoch - 6ms/step Epoch 187/350 46/46 - 0s - loss: 0.6083 - accuracy: 0.8115 - val_loss: 0.5398 - val_accuracy: 0.8550 - 249ms/epoch - 5ms/step Epoch 188/350 46/46 - 0s - loss: 0.6377 - accuracy: 0.8070 - val_loss: 0.5392 - val_accuracy: 0.8553 - 248ms/epoch - 5ms/step Epoch 189/350 46/46 - 0s - loss: 0.6331 - accuracy: 0.8091 - val_loss: 0.5412 - val_accuracy: 0.8547 - 247ms/epoch - 5ms/step Epoch 190/350 46/46 - 0s - loss: 0.5909 - accuracy: 0.8186 - val_loss: 0.5178 - val_accuracy: 0.8637 - 242ms/epoch - 5ms/step Epoch 191/350 46/46 - 0s - loss: 0.6130 - accuracy: 0.8155 - val_loss: 0.5232 - val_accuracy: 0.8577 - 240ms/epoch - 5ms/step Epoch 192/350 46/46 - 0s - loss: 0.6409 - accuracy: 0.8115 - val_loss: 0.6490 - val_accuracy: 0.8230 - 242ms/epoch - 5ms/step Epoch 193/350 46/46 - 0s - loss: 0.6312 - accuracy: 0.8084 - val_loss: 0.5129 - val_accuracy: 0.8610 - 239ms/epoch - 5ms/step Epoch 194/350 46/46 - 0s - loss: 0.5953 - accuracy: 0.8223 - val_loss: 0.5211 - val_accuracy: 0.8637 - 242ms/epoch - 5ms/step Epoch 195/350 46/46 - 0s - loss: 0.5982 - accuracy: 0.8163 - val_loss: 0.5295 - val_accuracy: 0.8590 - 238ms/epoch - 5ms/step Epoch 196/350 46/46 - 0s - loss: 0.5964 - accuracy: 0.8217 - val_loss: 0.5392 - val_accuracy: 0.8587 - 239ms/epoch - 5ms/step Epoch 197/350 46/46 - 0s - loss: 0.6002 - accuracy: 0.8176 - val_loss: 0.5306 - val_accuracy: 0.8550 - 236ms/epoch - 5ms/step Epoch 198/350 46/46 - 0s - loss: 0.6144 - accuracy: 0.8170 - val_loss: 0.5143 - val_accuracy: 0.8610 - 239ms/epoch - 5ms/step Epoch 199/350 46/46 - 0s - loss: 0.5966 - accuracy: 
0.8243 - val_loss: 0.5185 - val_accuracy: 0.8667 - 245ms/epoch - 5ms/step Epoch 200/350 46/46 - 0s - loss: 0.5853 - accuracy: 0.8248 - val_loss: 0.5129 - val_accuracy: 0.8657 - 247ms/epoch - 5ms/step Epoch 201/350 46/46 - 0s - loss: 0.5833 - accuracy: 0.8234 - val_loss: 0.5227 - val_accuracy: 0.8590 - 241ms/epoch - 5ms/step Epoch 202/350 46/46 - 0s - loss: 0.6008 - accuracy: 0.8173 - val_loss: 0.5252 - val_accuracy: 0.8593 - 241ms/epoch - 5ms/step Epoch 203/350 46/46 - 0s - loss: 0.6026 - accuracy: 0.8148 - val_loss: 0.5153 - val_accuracy: 0.8607 - 244ms/epoch - 5ms/step Epoch 204/350 46/46 - 0s - loss: 0.5722 - accuracy: 0.8281 - val_loss: 0.5074 - val_accuracy: 0.8643 - 241ms/epoch - 5ms/step Epoch 205/350 46/46 - 0s - loss: 0.5787 - accuracy: 0.8233 - val_loss: 0.5032 - val_accuracy: 0.8697 - 240ms/epoch - 5ms/step Epoch 206/350 46/46 - 0s - loss: 0.5803 - accuracy: 0.8235 - val_loss: 0.5102 - val_accuracy: 0.8623 - 242ms/epoch - 5ms/step Epoch 207/350 46/46 - 0s - loss: 0.6055 - accuracy: 0.8206 - val_loss: 0.5168 - val_accuracy: 0.8590 - 242ms/epoch - 5ms/step Epoch 208/350 46/46 - 0s - loss: 0.5630 - accuracy: 0.8315 - val_loss: 0.5037 - val_accuracy: 0.8670 - 245ms/epoch - 5ms/step Epoch 209/350 46/46 - 0s - loss: 0.5731 - accuracy: 0.8241 - val_loss: 0.4937 - val_accuracy: 0.8740 - 243ms/epoch - 5ms/step Epoch 210/350 46/46 - 0s - loss: 0.5647 - accuracy: 0.8300 - val_loss: 0.5173 - val_accuracy: 0.8630 - 250ms/epoch - 5ms/step Epoch 211/350 46/46 - 0s - loss: 0.5658 - accuracy: 0.8299 - val_loss: 0.5028 - val_accuracy: 0.8643 - 255ms/epoch - 6ms/step Epoch 212/350 46/46 - 0s - loss: 0.5664 - accuracy: 0.8242 - val_loss: 0.5014 - val_accuracy: 0.8660 - 249ms/epoch - 5ms/step Epoch 213/350 46/46 - 0s - loss: 0.5633 - accuracy: 0.8331 - val_loss: 0.5104 - val_accuracy: 0.8603 - 240ms/epoch - 5ms/step Epoch 214/350 46/46 - 0s - loss: 0.5994 - accuracy: 0.8221 - val_loss: 0.5012 - val_accuracy: 0.8640 - 243ms/epoch - 5ms/step Epoch 215/350 46/46 - 0s - loss: 0.5756 - accuracy: 0.8253 - val_loss: 0.5159 - val_accuracy: 0.8663 - 242ms/epoch - 5ms/step Epoch 216/350 46/46 - 0s - loss: 0.5690 - accuracy: 0.8293 - val_loss: 0.5047 - val_accuracy: 0.8630 - 241ms/epoch - 5ms/step Epoch 217/350 46/46 - 0s - loss: 0.5554 - accuracy: 0.8329 - val_loss: 0.4871 - val_accuracy: 0.8743 - 241ms/epoch - 5ms/step Epoch 218/350 46/46 - 0s - loss: 0.5588 - accuracy: 0.8324 - val_loss: 0.4792 - val_accuracy: 0.8740 - 241ms/epoch - 5ms/step Epoch 219/350 46/46 - 0s - loss: 0.5562 - accuracy: 0.8321 - val_loss: 0.4843 - val_accuracy: 0.8730 - 246ms/epoch - 5ms/step Epoch 220/350 46/46 - 0s - loss: 0.5344 - accuracy: 0.8356 - val_loss: 0.5049 - val_accuracy: 0.8660 - 245ms/epoch - 5ms/step Epoch 221/350 46/46 - 0s - loss: 0.5463 - accuracy: 0.8341 - val_loss: 0.4854 - val_accuracy: 0.8733 - 242ms/epoch - 5ms/step Epoch 222/350 46/46 - 0s - loss: 0.6018 - accuracy: 0.8212 - val_loss: 0.5104 - val_accuracy: 0.8650 - 243ms/epoch - 5ms/step Epoch 223/350 46/46 - 0s - loss: 0.5625 - accuracy: 0.8241 - val_loss: 0.4881 - val_accuracy: 0.8713 - 244ms/epoch - 5ms/step Epoch 224/350 46/46 - 0s - loss: 0.5283 - accuracy: 0.8411 - val_loss: 0.4817 - val_accuracy: 0.8720 - 242ms/epoch - 5ms/step Epoch 225/350 46/46 - 0s - loss: 0.5327 - accuracy: 0.8372 - val_loss: 0.4810 - val_accuracy: 0.8727 - 244ms/epoch - 5ms/step Epoch 226/350 46/46 - 0s - loss: 0.5507 - accuracy: 0.8377 - val_loss: 0.4689 - val_accuracy: 0.8763 - 253ms/epoch - 5ms/step Epoch 227/350 46/46 - 0s - loss: 0.5542 - accuracy: 0.8313 - val_loss: 0.4903 
- val_accuracy: 0.8683 - 253ms/epoch - 6ms/step Epoch 228/350 46/46 - 0s - loss: 0.5477 - accuracy: 0.8354 - val_loss: 0.4733 - val_accuracy: 0.8753 - 242ms/epoch - 5ms/step Epoch 229/350 46/46 - 0s - loss: 0.5189 - accuracy: 0.8423 - val_loss: 0.4690 - val_accuracy: 0.8770 - 239ms/epoch - 5ms/step Epoch 230/350 46/46 - 0s - loss: 0.5364 - accuracy: 0.8393 - val_loss: 0.4693 - val_accuracy: 0.8763 - 240ms/epoch - 5ms/step Epoch 231/350 46/46 - 0s - loss: 0.5458 - accuracy: 0.8435 - val_loss: 0.4883 - val_accuracy: 0.8713 - 245ms/epoch - 5ms/step Epoch 232/350 46/46 - 0s - loss: 0.5212 - accuracy: 0.8426 - val_loss: 0.4991 - val_accuracy: 0.8680 - 241ms/epoch - 5ms/step Epoch 233/350 46/46 - 0s - loss: 0.5804 - accuracy: 0.8279 - val_loss: 0.4797 - val_accuracy: 0.8753 - 247ms/epoch - 5ms/step Epoch 234/350 46/46 - 0s - loss: 0.5464 - accuracy: 0.8386 - val_loss: 0.4625 - val_accuracy: 0.8763 - 246ms/epoch - 5ms/step Epoch 235/350 46/46 - 0s - loss: 0.5204 - accuracy: 0.8467 - val_loss: 0.4846 - val_accuracy: 0.8700 - 247ms/epoch - 5ms/step Epoch 236/350 46/46 - 0s - loss: 0.5337 - accuracy: 0.8370 - val_loss: 0.5741 - val_accuracy: 0.8400 - 242ms/epoch - 5ms/step Epoch 237/350 46/46 - 0s - loss: 0.5222 - accuracy: 0.8461 - val_loss: 0.4586 - val_accuracy: 0.8803 - 240ms/epoch - 5ms/step Epoch 238/350 46/46 - 0s - loss: 0.5222 - accuracy: 0.8417 - val_loss: 0.5212 - val_accuracy: 0.8583 - 237ms/epoch - 5ms/step Epoch 239/350 46/46 - 0s - loss: 0.5533 - accuracy: 0.8336 - val_loss: 0.4732 - val_accuracy: 0.8770 - 243ms/epoch - 5ms/step Epoch 240/350 46/46 - 0s - loss: 0.5525 - accuracy: 0.8315 - val_loss: 0.4846 - val_accuracy: 0.8737 - 239ms/epoch - 5ms/step Epoch 241/350 46/46 - 0s - loss: 0.5487 - accuracy: 0.8377 - val_loss: 0.4838 - val_accuracy: 0.8770 - 238ms/epoch - 5ms/step Epoch 242/350 46/46 - 0s - loss: 0.5095 - accuracy: 0.8445 - val_loss: 0.4699 - val_accuracy: 0.8787 - 237ms/epoch - 5ms/step Epoch 243/350 46/46 - 0s - loss: 0.5120 - accuracy: 0.8449 - val_loss: 0.4983 - val_accuracy: 0.8707 - 241ms/epoch - 5ms/step Epoch 244/350 46/46 - 0s - loss: 0.5172 - accuracy: 0.8456 - val_loss: 0.4772 - val_accuracy: 0.8767 - 241ms/epoch - 5ms/step Epoch 245/350 46/46 - 0s - loss: 0.4983 - accuracy: 0.8531 - val_loss: 0.4663 - val_accuracy: 0.8780 - 244ms/epoch - 5ms/step Epoch 246/350 46/46 - 0s - loss: 0.5302 - accuracy: 0.8396 - val_loss: 0.4769 - val_accuracy: 0.8737 - 243ms/epoch - 5ms/step Epoch 247/350 46/46 - 0s - loss: 0.5034 - accuracy: 0.8451 - val_loss: 0.4862 - val_accuracy: 0.8703 - 242ms/epoch - 5ms/step Epoch 248/350 46/46 - 0s - loss: 0.5089 - accuracy: 0.8479 - val_loss: 0.4706 - val_accuracy: 0.8763 - 246ms/epoch - 5ms/step Epoch 249/350 46/46 - 0s - loss: 0.4901 - accuracy: 0.8525 - val_loss: 0.4611 - val_accuracy: 0.8813 - 247ms/epoch - 5ms/step Epoch 250/350 46/46 - 0s - loss: 0.5114 - accuracy: 0.8515 - val_loss: 0.4728 - val_accuracy: 0.8737 - 238ms/epoch - 5ms/step Epoch 251/350 46/46 - 0s - loss: 0.4995 - accuracy: 0.8455 - val_loss: 0.4743 - val_accuracy: 0.8733 - 240ms/epoch - 5ms/step Epoch 252/350 46/46 - 0s - loss: 0.5434 - accuracy: 0.8377 - val_loss: 0.4709 - val_accuracy: 0.8767 - 240ms/epoch - 5ms/step Epoch 253/350 46/46 - 0s - loss: 0.4990 - accuracy: 0.8494 - val_loss: 0.4668 - val_accuracy: 0.8737 - 240ms/epoch - 5ms/step Epoch 254/350 46/46 - 0s - loss: 0.5057 - accuracy: 0.8460 - val_loss: 0.4531 - val_accuracy: 0.8847 - 238ms/epoch - 5ms/step Epoch 255/350 46/46 - 0s - loss: 0.5252 - accuracy: 0.8466 - val_loss: 0.4577 - val_accuracy: 0.8823 - 
244ms/epoch - 5ms/step Epoch 256/350 46/46 - 0s - loss: 0.5083 - accuracy: 0.8467 - val_loss: 0.4303 - val_accuracy: 0.8887 - 243ms/epoch - 5ms/step Epoch 257/350 46/46 - 0s - loss: 0.4972 - accuracy: 0.8484 - val_loss: 0.4483 - val_accuracy: 0.8803 - 242ms/epoch - 5ms/step Epoch 258/350 46/46 - 0s - loss: 0.5022 - accuracy: 0.8506 - val_loss: 0.4549 - val_accuracy: 0.8800 - 242ms/epoch - 5ms/step Epoch 259/350 46/46 - 0s - loss: 0.4986 - accuracy: 0.8499 - val_loss: 0.4710 - val_accuracy: 0.8713 - 243ms/epoch - 5ms/step Epoch 260/350 46/46 - 0s - loss: 0.5091 - accuracy: 0.8465 - val_loss: 0.4580 - val_accuracy: 0.8793 - 241ms/epoch - 5ms/step Epoch 261/350 46/46 - 0s - loss: 0.5217 - accuracy: 0.8418 - val_loss: 0.4485 - val_accuracy: 0.8857 - 246ms/epoch - 5ms/step Epoch 262/350 46/46 - 0s - loss: 0.4889 - accuracy: 0.8518 - val_loss: 0.4981 - val_accuracy: 0.8663 - 239ms/epoch - 5ms/step Epoch 263/350 46/46 - 0s - loss: 0.5097 - accuracy: 0.8506 - val_loss: 0.4519 - val_accuracy: 0.8797 - 245ms/epoch - 5ms/step Epoch 264/350 46/46 - 0s - loss: 0.4914 - accuracy: 0.8501 - val_loss: 0.4520 - val_accuracy: 0.8817 - 241ms/epoch - 5ms/step Epoch 265/350 46/46 - 0s - loss: 0.4930 - accuracy: 0.8528 - val_loss: 0.4469 - val_accuracy: 0.8867 - 239ms/epoch - 5ms/step Epoch 266/350 46/46 - 0s - loss: 0.4971 - accuracy: 0.8502 - val_loss: 0.4682 - val_accuracy: 0.8753 - 236ms/epoch - 5ms/step Epoch 267/350 46/46 - 0s - loss: 0.4949 - accuracy: 0.8551 - val_loss: 0.4610 - val_accuracy: 0.8797 - 242ms/epoch - 5ms/step Epoch 268/350 46/46 - 0s - loss: 0.4694 - accuracy: 0.8560 - val_loss: 0.4670 - val_accuracy: 0.8743 - 243ms/epoch - 5ms/step Epoch 269/350 46/46 - 0s - loss: 0.5030 - accuracy: 0.8515 - val_loss: 0.4632 - val_accuracy: 0.8780 - 242ms/epoch - 5ms/step Epoch 270/350 46/46 - 0s - loss: 0.4871 - accuracy: 0.8553 - val_loss: 0.4576 - val_accuracy: 0.8807 - 238ms/epoch - 5ms/step Epoch 271/350 46/46 - 0s - loss: 0.4929 - accuracy: 0.8539 - val_loss: 0.4488 - val_accuracy: 0.8817 - 240ms/epoch - 5ms/step Epoch 272/350 46/46 - 0s - loss: 0.4735 - accuracy: 0.8607 - val_loss: 0.4432 - val_accuracy: 0.8830 - 240ms/epoch - 5ms/step Epoch 273/350 46/46 - 0s - loss: 0.4778 - accuracy: 0.8567 - val_loss: 0.5561 - val_accuracy: 0.8513 - 239ms/epoch - 5ms/step Epoch 274/350 46/46 - 0s - loss: 0.5104 - accuracy: 0.8479 - val_loss: 0.4395 - val_accuracy: 0.8890 - 237ms/epoch - 5ms/step Epoch 275/350 46/46 - 0s - loss: 0.4650 - accuracy: 0.8621 - val_loss: 0.4523 - val_accuracy: 0.8807 - 242ms/epoch - 5ms/step Epoch 276/350 46/46 - 0s - loss: 0.5003 - accuracy: 0.8489 - val_loss: 0.5488 - val_accuracy: 0.8497 - 241ms/epoch - 5ms/step Epoch 277/350 46/46 - 0s - loss: 0.5048 - accuracy: 0.8497 - val_loss: 0.4375 - val_accuracy: 0.8843 - 241ms/epoch - 5ms/step Epoch 278/350 46/46 - 0s - loss: 0.4675 - accuracy: 0.8540 - val_loss: 0.4435 - val_accuracy: 0.8847 - 242ms/epoch - 5ms/step Epoch 279/350 46/46 - 0s - loss: 0.4700 - accuracy: 0.8631 - val_loss: 0.4423 - val_accuracy: 0.8833 - 240ms/epoch - 5ms/step Epoch 280/350 46/46 - 0s - loss: 0.4648 - accuracy: 0.8603 - val_loss: 0.4449 - val_accuracy: 0.8807 - 243ms/epoch - 5ms/step Epoch 281/350 46/46 - 0s - loss: 0.4817 - accuracy: 0.8579 - val_loss: 0.4979 - val_accuracy: 0.8630 - 244ms/epoch - 5ms/step Epoch 282/350 46/46 - 0s - loss: 0.4789 - accuracy: 0.8564 - val_loss: 0.4508 - val_accuracy: 0.8850 - 241ms/epoch - 5ms/step Epoch 283/350 46/46 - 0s - loss: 0.4711 - accuracy: 0.8608 - val_loss: 0.4451 - val_accuracy: 0.8833 - 243ms/epoch - 5ms/step 
Epoch 284/350 46/46 - 0s - loss: 0.4614 - accuracy: 0.8673 - val_loss: 0.4281 - val_accuracy: 0.8907 - 239ms/epoch - 5ms/step Epoch 285/350 46/46 - 0s - loss: 0.4696 - accuracy: 0.8548 - val_loss: 0.4260 - val_accuracy: 0.8880 - 241ms/epoch - 5ms/step Epoch 286/350 46/46 - 0s - loss: 0.4973 - accuracy: 0.8541 - val_loss: 0.4655 - val_accuracy: 0.8780 - 240ms/epoch - 5ms/step Epoch 287/350 46/46 - 0s - loss: 0.4597 - accuracy: 0.8604 - val_loss: 0.4698 - val_accuracy: 0.8823 - 243ms/epoch - 5ms/step Epoch 288/350 46/46 - 0s - loss: 0.4715 - accuracy: 0.8603 - val_loss: 0.4208 - val_accuracy: 0.8903 - 243ms/epoch - 5ms/step Epoch 289/350 46/46 - 0s - loss: 0.4719 - accuracy: 0.8628 - val_loss: 0.4610 - val_accuracy: 0.8753 - 239ms/epoch - 5ms/step Epoch 290/350 46/46 - 0s - loss: 0.4629 - accuracy: 0.8625 - val_loss: 0.4341 - val_accuracy: 0.8850 - 240ms/epoch - 5ms/step Epoch 291/350 46/46 - 0s - loss: 0.4412 - accuracy: 0.8695 - val_loss: 0.4411 - val_accuracy: 0.8880 - 241ms/epoch - 5ms/step Epoch 292/350 46/46 - 0s - loss: 0.4545 - accuracy: 0.8639 - val_loss: 0.4577 - val_accuracy: 0.8777 - 242ms/epoch - 5ms/step Epoch 293/350 46/46 - 0s - loss: 0.4450 - accuracy: 0.8733 - val_loss: 0.4611 - val_accuracy: 0.8810 - 241ms/epoch - 5ms/step Epoch 294/350 46/46 - 0s - loss: 0.4548 - accuracy: 0.8611 - val_loss: 0.4436 - val_accuracy: 0.8797 - 246ms/epoch - 5ms/step Epoch 295/350 46/46 - 0s - loss: 0.4547 - accuracy: 0.8656 - val_loss: 0.4283 - val_accuracy: 0.8863 - 245ms/epoch - 5ms/step Epoch 296/350 46/46 - 0s - loss: 0.4646 - accuracy: 0.8650 - val_loss: 0.4100 - val_accuracy: 0.8903 - 246ms/epoch - 5ms/step Epoch 297/350 46/46 - 0s - loss: 0.4570 - accuracy: 0.8633 - val_loss: 0.4498 - val_accuracy: 0.8827 - 241ms/epoch - 5ms/step Epoch 298/350 46/46 - 0s - loss: 0.4637 - accuracy: 0.8617 - val_loss: 0.4178 - val_accuracy: 0.8903 - 241ms/epoch - 5ms/step Epoch 299/350 46/46 - 0s - loss: 0.4305 - accuracy: 0.8680 - val_loss: 0.4544 - val_accuracy: 0.8777 - 250ms/epoch - 5ms/step Epoch 300/350 46/46 - 0s - loss: 0.4406 - accuracy: 0.8703 - val_loss: 0.4272 - val_accuracy: 0.8860 - 254ms/epoch - 6ms/step Epoch 301/350 46/46 - 0s - loss: 0.4489 - accuracy: 0.8640 - val_loss: 0.4240 - val_accuracy: 0.8847 - 250ms/epoch - 5ms/step Epoch 302/350 46/46 - 0s - loss: 0.4377 - accuracy: 0.8732 - val_loss: 0.4134 - val_accuracy: 0.8913 - 246ms/epoch - 5ms/step Epoch 303/350 46/46 - 0s - loss: 0.4537 - accuracy: 0.8649 - val_loss: 0.4180 - val_accuracy: 0.8887 - 241ms/epoch - 5ms/step Epoch 304/350 46/46 - 0s - loss: 0.4627 - accuracy: 0.8614 - val_loss: 0.4191 - val_accuracy: 0.8947 - 247ms/epoch - 5ms/step Epoch 305/350 46/46 - 0s - loss: 0.4610 - accuracy: 0.8604 - val_loss: 0.4435 - val_accuracy: 0.8790 - 242ms/epoch - 5ms/step Epoch 306/350 46/46 - 0s - loss: 0.4343 - accuracy: 0.8692 - val_loss: 0.4162 - val_accuracy: 0.8933 - 245ms/epoch - 5ms/step Epoch 307/350 46/46 - 0s - loss: 0.4405 - accuracy: 0.8717 - val_loss: 0.4085 - val_accuracy: 0.8947 - 242ms/epoch - 5ms/step Epoch 308/350 46/46 - 0s - loss: 0.4429 - accuracy: 0.8694 - val_loss: 0.4203 - val_accuracy: 0.8880 - 246ms/epoch - 5ms/step Epoch 309/350 46/46 - 0s - loss: 0.4446 - accuracy: 0.8670 - val_loss: 0.4253 - val_accuracy: 0.8863 - 254ms/epoch - 6ms/step Epoch 310/350 46/46 - 0s - loss: 0.4415 - accuracy: 0.8696 - val_loss: 0.4350 - val_accuracy: 0.8810 - 252ms/epoch - 5ms/step Epoch 311/350 46/46 - 0s - loss: 0.4425 - accuracy: 0.8665 - val_loss: 0.4231 - val_accuracy: 0.8887 - 252ms/epoch - 5ms/step Epoch 312/350 46/46 - 0s - 
loss: 0.4626 - accuracy: 0.8645 - val_loss: 0.4222 - val_accuracy: 0.8937 - 244ms/epoch - 5ms/step Epoch 313/350 46/46 - 0s - loss: 0.4362 - accuracy: 0.8737 - val_loss: 0.4452 - val_accuracy: 0.8787 - 243ms/epoch - 5ms/step Epoch 314/350 46/46 - 0s - loss: 0.4551 - accuracy: 0.8656 - val_loss: 0.4153 - val_accuracy: 0.8937 - 241ms/epoch - 5ms/step Epoch 315/350 46/46 - 0s - loss: 0.4543 - accuracy: 0.8683 - val_loss: 0.4476 - val_accuracy: 0.8870 - 244ms/epoch - 5ms/step Epoch 316/350 46/46 - 0s - loss: 0.4515 - accuracy: 0.8717 - val_loss: 0.4009 - val_accuracy: 0.8967 - 254ms/epoch - 6ms/step Epoch 317/350 46/46 - 0s - loss: 0.4598 - accuracy: 0.8643 - val_loss: 0.4403 - val_accuracy: 0.8843 - 241ms/epoch - 5ms/step Epoch 318/350 46/46 - 0s - loss: 0.4377 - accuracy: 0.8677 - val_loss: 0.4345 - val_accuracy: 0.8893 - 240ms/epoch - 5ms/step Epoch 319/350 46/46 - 0s - loss: 0.4321 - accuracy: 0.8720 - val_loss: 0.4129 - val_accuracy: 0.8920 - 238ms/epoch - 5ms/step Epoch 320/350 46/46 - 0s - loss: 0.4355 - accuracy: 0.8716 - val_loss: 0.4009 - val_accuracy: 0.8980 - 241ms/epoch - 5ms/step Epoch 321/350 46/46 - 0s - loss: 0.4544 - accuracy: 0.8670 - val_loss: 0.4303 - val_accuracy: 0.8893 - 245ms/epoch - 5ms/step Epoch 322/350 46/46 - 0s - loss: 0.4490 - accuracy: 0.8698 - val_loss: 0.4109 - val_accuracy: 0.8943 - 240ms/epoch - 5ms/step Epoch 323/350 46/46 - 0s - loss: 0.4440 - accuracy: 0.8626 - val_loss: 0.5252 - val_accuracy: 0.8573 - 241ms/epoch - 5ms/step Epoch 324/350 46/46 - 0s - loss: 0.4483 - accuracy: 0.8677 - val_loss: 0.4199 - val_accuracy: 0.8957 - 242ms/epoch - 5ms/step Epoch 325/350 46/46 - 0s - loss: 0.4143 - accuracy: 0.8772 - val_loss: 0.4161 - val_accuracy: 0.8890 - 243ms/epoch - 5ms/step Epoch 326/350 46/46 - 0s - loss: 0.4210 - accuracy: 0.8738 - val_loss: 0.4090 - val_accuracy: 0.8913 - 243ms/epoch - 5ms/step Epoch 327/350 46/46 - 0s - loss: 0.4322 - accuracy: 0.8712 - val_loss: 0.4058 - val_accuracy: 0.8953 - 243ms/epoch - 5ms/step Epoch 328/350 46/46 - 0s - loss: 0.4312 - accuracy: 0.8749 - val_loss: 0.4159 - val_accuracy: 0.8877 - 243ms/epoch - 5ms/step Epoch 329/350 46/46 - 0s - loss: 0.4289 - accuracy: 0.8690 - val_loss: 0.4122 - val_accuracy: 0.8950 - 240ms/epoch - 5ms/step Epoch 330/350 46/46 - 0s - loss: 0.4286 - accuracy: 0.8730 - val_loss: 0.3994 - val_accuracy: 0.8977 - 241ms/epoch - 5ms/step Epoch 331/350 46/46 - 0s - loss: 0.4433 - accuracy: 0.8718 - val_loss: 0.4145 - val_accuracy: 0.8913 - 239ms/epoch - 5ms/step Epoch 332/350 46/46 - 0s - loss: 0.4146 - accuracy: 0.8759 - val_loss: 0.4121 - val_accuracy: 0.8883 - 244ms/epoch - 5ms/step Epoch 333/350 46/46 - 0s - loss: 0.4439 - accuracy: 0.8696 - val_loss: 0.4336 - val_accuracy: 0.8803 - 242ms/epoch - 5ms/step Epoch 334/350 46/46 - 0s - loss: 0.4170 - accuracy: 0.8770 - val_loss: 0.4036 - val_accuracy: 0.8903 - 244ms/epoch - 5ms/step Epoch 335/350 46/46 - 0s - loss: 0.4303 - accuracy: 0.8739 - val_loss: 0.4382 - val_accuracy: 0.8817 - 253ms/epoch - 5ms/step Epoch 336/350 46/46 - 0s - loss: 0.4233 - accuracy: 0.8739 - val_loss: 0.3928 - val_accuracy: 0.8973 - 250ms/epoch - 5ms/step Epoch 337/350 46/46 - 0s - loss: 0.4075 - accuracy: 0.8820 - val_loss: 0.3927 - val_accuracy: 0.8963 - 244ms/epoch - 5ms/step Epoch 338/350 46/46 - 0s - loss: 0.4182 - accuracy: 0.8725 - val_loss: 0.4112 - val_accuracy: 0.8890 - 243ms/epoch - 5ms/step Epoch 339/350 46/46 - 0s - loss: 0.4250 - accuracy: 0.8730 - val_loss: 0.4090 - val_accuracy: 0.8923 - 243ms/epoch - 5ms/step Epoch 340/350 46/46 - 0s - loss: 0.4205 - accuracy: 
0.8736 - val_loss: 0.4147 - val_accuracy: 0.8917 - 240ms/epoch - 5ms/step Epoch 341/350 46/46 - 0s - loss: 0.4169 - accuracy: 0.8759 - val_loss: 0.4076 - val_accuracy: 0.8877 - 242ms/epoch - 5ms/step Epoch 342/350 46/46 - 0s - loss: 0.4150 - accuracy: 0.8778 - val_loss: 0.3987 - val_accuracy: 0.8927 - 243ms/epoch - 5ms/step Epoch 343/350 46/46 - 0s - loss: 0.4265 - accuracy: 0.8745 - val_loss: 0.4201 - val_accuracy: 0.8873 - 241ms/epoch - 5ms/step Epoch 344/350 46/46 - 0s - loss: 0.4430 - accuracy: 0.8704 - val_loss: 0.4243 - val_accuracy: 0.8883 - 248ms/epoch - 5ms/step Epoch 345/350 46/46 - 0s - loss: 0.4179 - accuracy: 0.8776 - val_loss: 0.4062 - val_accuracy: 0.8913 - 245ms/epoch - 5ms/step Epoch 346/350 46/46 - 0s - loss: 0.4559 - accuracy: 0.8658 - val_loss: 0.4037 - val_accuracy: 0.8957 - 245ms/epoch - 5ms/step Epoch 347/350 46/46 - 0s - loss: 0.4140 - accuracy: 0.8773 - val_loss: 0.4178 - val_accuracy: 0.8893 - 244ms/epoch - 5ms/step Epoch 348/350 46/46 - 0s - loss: 0.4129 - accuracy: 0.8769 - val_loss: 0.4109 - val_accuracy: 0.8910 - 246ms/epoch - 5ms/step Epoch 349/350 46/46 - 0s - loss: 0.4120 - accuracy: 0.8789 - val_loss: 0.4493 - val_accuracy: 0.8810 - 242ms/epoch - 5ms/step Epoch 350/350 46/46 - 0s - loss: 0.4451 - accuracy: 0.8685 - val_loss: 0.4087 - val_accuracy: 0.8913 - 241ms/epoch - 5ms/step 94/94 - 1s - loss: 0.4087 - accuracy: 0.8913 - 694ms/epoch - 7ms/step Baseline Error: 10.87%
model_31new.save('extraregularized31.h5') #Save model as results are promising
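As a small usage note, the saved HDF5 checkpoint can be loaded back later with Keras's load_model; a minimal sketch, assuming the same X_val31 and validation_labels arrays are still in memory for a quick sanity check:
from keras.models import load_model
# reload the checkpoint saved above and re-run evaluation on the validation split
restored = load_model('extraregularized31.h5')
restored.evaluate(X_val31, validation_labels, verbose=2)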
The model's performance is now much improved. Near the end of the 350 epochs the accuracy is still creeping upwards, so we could try increasing the epoch count to 500 to see if results improve further. However, that is a poor trade-off: the runtime would grow considerably for only a small gain in accuracy. Instead, we can add batch normalization to speed up convergence.
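As an aside on that epoch-count trade-off: rather than hand-picking 350 or 500 epochs, the EarlyStopping callback (already imported at the top of the notebook) could stop training once val_loss stops improving. A rough sketch only, with an arbitrary, untuned patience value; the fit call is left commented so it does not interfere with the run below:
# hypothetical: stop once val_loss has not improved for 30 epochs and keep the best weights
early_stop = EarlyStopping(monitor='val_loss', patience=30, restore_best_weights=True)
# history = model_31new.fit(X_train31, train_labels,
#                           validation_data=(X_val31, validation_labels),
#                           epochs=500, batch_size=200, verbose=2,
#                           class_weight=class_weight, callbacks=[early_stop])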
#Batch normalization
model_31new = models.Sequential([
    layers.Conv2D(32, (3, 3), activation='relu', input_shape=(31, 31, 1)),
    layers.MaxPooling2D((2, 2)),
    layers.Dropout(0.2),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.MaxPooling2D((2, 2)),
    layers.Dropout(0.2),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.MaxPooling2D((2, 2)),
    layers.Dropout(0.2),
    layers.Flatten(),
    layers.Dense(64, activation='relu', kernel_regularizer=regularizers.l2(0.001)),
    layers.Dropout(0.5),
    layers.BatchNormalization(),  # add batch norm; I have tried other placements and this one gives the smoothest curves
    layers.Dense(15, activation='softmax')
])
model_31new.compile(optimizer='adam',
                    loss='categorical_crossentropy',
                    metrics=['accuracy'])
history = model_31new.fit(X_train31, train_labels, validation_data=(X_val31, validation_labels),
                          epochs=350, batch_size=200, verbose=2, class_weight=class_weight)  # train for 350 epochs
scores = model_31new.evaluate(X_val31, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
df31.loc[len(df31)] = ['NormalizedModel', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
Epoch 1/350
2023-11-26 13:24:03.470223: E tensorflow/core/grappler/optimizers/meta_optimizer.cc:954] layout failed: INVALID_ARGUMENT: Size of values 0 does not match size of permutation 4 @ fanin shape insequential_11/dropout_7/dropout/SelectV2-2-TransposeNHWCToNCHW-LayoutOptimizer
46/46 - 3s - loss: 3.6186 - accuracy: 0.0757 - val_loss: 2.8064 - val_accuracy: 0.0667 - 3s/epoch - 61ms/step Epoch 2/350 46/46 - 0s - loss: 3.2225 - accuracy: 0.1310 - val_loss: 2.8256 - val_accuracy: 0.1113 - 266ms/epoch - 6ms/step Epoch 3/350 46/46 - 0s - loss: 2.9812 - accuracy: 0.1683 - val_loss: 2.8506 - val_accuracy: 0.1163 - 266ms/epoch - 6ms/step Epoch 4/350 46/46 - 0s - loss: 2.8133 - accuracy: 0.1978 - val_loss: 2.8127 - val_accuracy: 0.1187 - 260ms/epoch - 6ms/step Epoch 5/350 46/46 - 0s - loss: 2.6429 - accuracy: 0.2379 - val_loss: 2.7536 - val_accuracy: 0.1227 - 269ms/epoch - 6ms/step Epoch 6/350 46/46 - 0s - loss: 2.4832 - accuracy: 0.2646 - val_loss: 2.6598 - val_accuracy: 0.1447 - 260ms/epoch - 6ms/step Epoch 7/350 46/46 - 0s - loss: 2.3464 - accuracy: 0.2913 - val_loss: 2.5492 - val_accuracy: 0.1760 - 264ms/epoch - 6ms/step Epoch 8/350 46/46 - 0s - loss: 2.2419 - accuracy: 0.3119 - val_loss: 2.4387 - val_accuracy: 0.2067 - 264ms/epoch - 6ms/step Epoch 9/350 46/46 - 0s - loss: 2.1416 - accuracy: 0.3440 - val_loss: 2.2637 - val_accuracy: 0.2673 - 262ms/epoch - 6ms/step Epoch 10/350 46/46 - 0s - loss: 2.0251 - accuracy: 0.3756 - val_loss: 2.0980 - val_accuracy: 0.3470 - 260ms/epoch - 6ms/step Epoch 11/350 46/46 - 0s - loss: 1.9532 - accuracy: 0.3936 - val_loss: 2.0366 - val_accuracy: 0.3110 - 268ms/epoch - 6ms/step Epoch 12/350 46/46 - 0s - loss: 1.8691 - accuracy: 0.4140 - val_loss: 1.8701 - val_accuracy: 0.4017 - 260ms/epoch - 6ms/step Epoch 13/350 46/46 - 0s - loss: 1.8253 - accuracy: 0.4258 - val_loss: 1.8703 - val_accuracy: 0.3993 - 262ms/epoch - 6ms/step Epoch 14/350 46/46 - 0s - loss: 1.7511 - accuracy: 0.4478 - val_loss: 1.6397 - val_accuracy: 0.4817 - 263ms/epoch - 6ms/step Epoch 15/350 46/46 - 0s - loss: 1.6864 - accuracy: 0.4710 - val_loss: 1.6621 - val_accuracy: 0.4540 - 264ms/epoch - 6ms/step Epoch 16/350 46/46 - 0s - loss: 1.6433 - accuracy: 0.4891 - val_loss: 1.5623 - val_accuracy: 0.5087 - 259ms/epoch - 6ms/step Epoch 17/350 46/46 - 0s - loss: 1.5900 - accuracy: 0.5025 - val_loss: 1.4489 - val_accuracy: 0.5370 - 273ms/epoch - 6ms/step Epoch 18/350 46/46 - 0s - loss: 1.5238 - accuracy: 0.5173 - val_loss: 1.3929 - val_accuracy: 0.5790 - 263ms/epoch - 6ms/step Epoch 19/350 46/46 - 0s - loss: 1.4626 - accuracy: 0.5393 - val_loss: 1.2952 - val_accuracy: 0.5943 - 266ms/epoch - 6ms/step Epoch 20/350 46/46 - 0s - loss: 1.4558 - accuracy: 0.5455 - val_loss: 1.2923 - val_accuracy: 0.6087 - 262ms/epoch - 6ms/step Epoch 21/350 46/46 - 0s - loss: 1.3842 - accuracy: 0.5707 - val_loss: 1.2745 - val_accuracy: 0.6140 - 265ms/epoch - 6ms/step Epoch 22/350 46/46 - 0s - loss: 1.3494 - accuracy: 0.5767 - val_loss: 1.2450 - val_accuracy: 0.6180 - 264ms/epoch - 6ms/step Epoch 23/350 46/46 - 0s - loss: 1.2993 - accuracy: 0.5964 - val_loss: 1.1441 - val_accuracy: 0.6650 - 263ms/epoch - 6ms/step Epoch 24/350 46/46 - 0s - loss: 1.2727 - accuracy: 0.6021 - val_loss: 1.1256 - val_accuracy: 0.6587 - 260ms/epoch - 6ms/step Epoch 25/350 46/46 - 0s - loss: 1.2759 - accuracy: 0.5997 - val_loss: 1.2297 - val_accuracy: 0.6280 - 263ms/epoch - 6ms/step Epoch 26/350 46/46 - 0s - loss: 1.2396 - accuracy: 0.6046 - val_loss: 1.0747 - val_accuracy: 0.6820 - 264ms/epoch - 6ms/step Epoch 27/350 46/46 - 0s - loss: 1.1887 - accuracy: 0.6261 - val_loss: 1.0162 - val_accuracy: 0.6947 - 264ms/epoch - 6ms/step Epoch 28/350 46/46 - 0s - loss: 1.1250 - accuracy: 0.6478 - val_loss: 0.9435 - val_accuracy: 0.7103 - 266ms/epoch - 6ms/step Epoch 29/350 46/46 - 0s - loss: 1.1121 - accuracy: 0.6561 - val_loss: 0.9719 
- val_accuracy: 0.7173 - 263ms/epoch - 6ms/step Epoch 30/350 46/46 - 0s - loss: 1.0726 - accuracy: 0.6654 - val_loss: 0.9135 - val_accuracy: 0.7240 - 260ms/epoch - 6ms/step Epoch 31/350 46/46 - 0s - loss: 1.0589 - accuracy: 0.6711 - val_loss: 0.9186 - val_accuracy: 0.7303 - 263ms/epoch - 6ms/step Epoch 32/350 46/46 - 0s - loss: 1.0279 - accuracy: 0.6818 - val_loss: 0.8477 - val_accuracy: 0.7500 - 264ms/epoch - 6ms/step Epoch 33/350 46/46 - 0s - loss: 1.0117 - accuracy: 0.6827 - val_loss: 0.9076 - val_accuracy: 0.7300 - 261ms/epoch - 6ms/step Epoch 34/350 46/46 - 0s - loss: 1.0118 - accuracy: 0.6848 - val_loss: 0.7808 - val_accuracy: 0.7737 - 262ms/epoch - 6ms/step Epoch 35/350 46/46 - 0s - loss: 0.9737 - accuracy: 0.7071 - val_loss: 0.8271 - val_accuracy: 0.7500 - 264ms/epoch - 6ms/step Epoch 36/350 46/46 - 0s - loss: 0.9494 - accuracy: 0.7069 - val_loss: 0.7619 - val_accuracy: 0.7727 - 267ms/epoch - 6ms/step Epoch 37/350 46/46 - 0s - loss: 0.9236 - accuracy: 0.7150 - val_loss: 0.8161 - val_accuracy: 0.7577 - 269ms/epoch - 6ms/step Epoch 38/350 46/46 - 0s - loss: 0.9122 - accuracy: 0.7242 - val_loss: 0.7770 - val_accuracy: 0.7797 - 273ms/epoch - 6ms/step Epoch 39/350 46/46 - 0s - loss: 0.9294 - accuracy: 0.7178 - val_loss: 0.7616 - val_accuracy: 0.7847 - 281ms/epoch - 6ms/step Epoch 40/350 46/46 - 0s - loss: 0.8997 - accuracy: 0.7276 - val_loss: 0.8171 - val_accuracy: 0.7657 - 277ms/epoch - 6ms/step Epoch 41/350 46/46 - 0s - loss: 0.8729 - accuracy: 0.7332 - val_loss: 0.6592 - val_accuracy: 0.8113 - 278ms/epoch - 6ms/step Epoch 42/350 46/46 - 0s - loss: 0.8399 - accuracy: 0.7421 - val_loss: 0.6826 - val_accuracy: 0.8017 - 277ms/epoch - 6ms/step Epoch 43/350 46/46 - 0s - loss: 0.8580 - accuracy: 0.7385 - val_loss: 0.7069 - val_accuracy: 0.7930 - 285ms/epoch - 6ms/step Epoch 44/350 46/46 - 0s - loss: 0.8080 - accuracy: 0.7512 - val_loss: 0.7151 - val_accuracy: 0.7873 - 261ms/epoch - 6ms/step Epoch 45/350 46/46 - 0s - loss: 0.8567 - accuracy: 0.7391 - val_loss: 0.7311 - val_accuracy: 0.7870 - 266ms/epoch - 6ms/step Epoch 46/350 46/46 - 0s - loss: 0.7977 - accuracy: 0.7542 - val_loss: 0.6711 - val_accuracy: 0.8053 - 279ms/epoch - 6ms/step Epoch 47/350 46/46 - 0s - loss: 0.7913 - accuracy: 0.7562 - val_loss: 0.6190 - val_accuracy: 0.8223 - 277ms/epoch - 6ms/step Epoch 48/350 46/46 - 0s - loss: 0.7630 - accuracy: 0.7696 - val_loss: 0.6648 - val_accuracy: 0.8173 - 278ms/epoch - 6ms/step Epoch 49/350 46/46 - 0s - loss: 0.7751 - accuracy: 0.7702 - val_loss: 0.6361 - val_accuracy: 0.8190 - 272ms/epoch - 6ms/step Epoch 50/350 46/46 - 0s - loss: 0.7875 - accuracy: 0.7582 - val_loss: 0.6151 - val_accuracy: 0.8290 - 287ms/epoch - 6ms/step Epoch 51/350 46/46 - 0s - loss: 0.7752 - accuracy: 0.7677 - val_loss: 0.6471 - val_accuracy: 0.8160 - 272ms/epoch - 6ms/step Epoch 52/350 46/46 - 0s - loss: 0.7519 - accuracy: 0.7710 - val_loss: 0.6156 - val_accuracy: 0.8233 - 285ms/epoch - 6ms/step Epoch 53/350 46/46 - 0s - loss: 0.7308 - accuracy: 0.7797 - val_loss: 0.6384 - val_accuracy: 0.8160 - 278ms/epoch - 6ms/step Epoch 54/350 46/46 - 0s - loss: 0.6939 - accuracy: 0.7861 - val_loss: 0.5708 - val_accuracy: 0.8443 - 282ms/epoch - 6ms/step Epoch 55/350 46/46 - 0s - loss: 0.7007 - accuracy: 0.7905 - val_loss: 0.5640 - val_accuracy: 0.8483 - 273ms/epoch - 6ms/step Epoch 56/350 46/46 - 0s - loss: 0.6928 - accuracy: 0.7902 - val_loss: 0.5875 - val_accuracy: 0.8307 - 263ms/epoch - 6ms/step Epoch 57/350 46/46 - 0s - loss: 0.6450 - accuracy: 0.8065 - val_loss: 0.5644 - val_accuracy: 0.8370 - 266ms/epoch - 6ms/step Epoch 
58/350 46/46 - 0s - loss: 0.6533 - accuracy: 0.8013 - val_loss: 0.5848 - val_accuracy: 0.8463 - 278ms/epoch - 6ms/step Epoch 59/350 46/46 - 0s - loss: 0.6985 - accuracy: 0.7933 - val_loss: 0.5802 - val_accuracy: 0.8377 - 272ms/epoch - 6ms/step Epoch 60/350 46/46 - 0s - loss: 0.6507 - accuracy: 0.8035 - val_loss: 0.5557 - val_accuracy: 0.8433 - 266ms/epoch - 6ms/step Epoch 61/350 46/46 - 0s - loss: 0.6414 - accuracy: 0.8028 - val_loss: 0.5570 - val_accuracy: 0.8440 - 263ms/epoch - 6ms/step Epoch 62/350 46/46 - 0s - loss: 0.6392 - accuracy: 0.8037 - val_loss: 0.5160 - val_accuracy: 0.8573 - 265ms/epoch - 6ms/step Epoch 63/350 46/46 - 0s - loss: 0.6297 - accuracy: 0.8090 - val_loss: 0.5461 - val_accuracy: 0.8520 - 267ms/epoch - 6ms/step Epoch 64/350 46/46 - 0s - loss: 0.6606 - accuracy: 0.8047 - val_loss: 0.5390 - val_accuracy: 0.8497 - 260ms/epoch - 6ms/step Epoch 65/350 46/46 - 0s - loss: 0.6932 - accuracy: 0.7977 - val_loss: 0.5616 - val_accuracy: 0.8373 - 264ms/epoch - 6ms/step Epoch 66/350 46/46 - 0s - loss: 0.6110 - accuracy: 0.8150 - val_loss: 0.5209 - val_accuracy: 0.8623 - 262ms/epoch - 6ms/step Epoch 67/350 46/46 - 0s - loss: 0.6179 - accuracy: 0.8163 - val_loss: 0.5357 - val_accuracy: 0.8553 - 255ms/epoch - 6ms/step Epoch 68/350 46/46 - 0s - loss: 0.6124 - accuracy: 0.8190 - val_loss: 0.5707 - val_accuracy: 0.8390 - 262ms/epoch - 6ms/step Epoch 69/350 46/46 - 0s - loss: 0.6006 - accuracy: 0.8211 - val_loss: 0.5219 - val_accuracy: 0.8590 - 264ms/epoch - 6ms/step Epoch 70/350 46/46 - 0s - loss: 0.6008 - accuracy: 0.8175 - val_loss: 0.5472 - val_accuracy: 0.8447 - 276ms/epoch - 6ms/step Epoch 71/350 46/46 - 0s - loss: 0.6240 - accuracy: 0.8127 - val_loss: 0.4887 - val_accuracy: 0.8617 - 273ms/epoch - 6ms/step Epoch 72/350 46/46 - 0s - loss: 0.5801 - accuracy: 0.8304 - val_loss: 0.4981 - val_accuracy: 0.8600 - 272ms/epoch - 6ms/step Epoch 73/350 46/46 - 0s - loss: 0.5820 - accuracy: 0.8271 - val_loss: 0.5487 - val_accuracy: 0.8477 - 271ms/epoch - 6ms/step Epoch 74/350 46/46 - 0s - loss: 0.5766 - accuracy: 0.8269 - val_loss: 0.5035 - val_accuracy: 0.8677 - 263ms/epoch - 6ms/step Epoch 75/350 46/46 - 0s - loss: 0.5678 - accuracy: 0.8356 - val_loss: 0.4847 - val_accuracy: 0.8633 - 264ms/epoch - 6ms/step Epoch 76/350 46/46 - 0s - loss: 0.5764 - accuracy: 0.8267 - val_loss: 0.4969 - val_accuracy: 0.8613 - 261ms/epoch - 6ms/step Epoch 77/350 46/46 - 0s - loss: 0.5761 - accuracy: 0.8275 - val_loss: 0.5135 - val_accuracy: 0.8563 - 263ms/epoch - 6ms/step Epoch 78/350 46/46 - 0s - loss: 0.5680 - accuracy: 0.8309 - val_loss: 0.5291 - val_accuracy: 0.8547 - 259ms/epoch - 6ms/step Epoch 79/350 46/46 - 0s - loss: 0.6102 - accuracy: 0.8187 - val_loss: 0.5424 - val_accuracy: 0.8457 - 258ms/epoch - 6ms/step Epoch 80/350 46/46 - 0s - loss: 0.5956 - accuracy: 0.8263 - val_loss: 0.5895 - val_accuracy: 0.8447 - 261ms/epoch - 6ms/step Epoch 81/350 46/46 - 0s - loss: 0.5837 - accuracy: 0.8233 - val_loss: 0.4988 - val_accuracy: 0.8587 - 262ms/epoch - 6ms/step Epoch 82/350 46/46 - 0s - loss: 0.5211 - accuracy: 0.8401 - val_loss: 0.4909 - val_accuracy: 0.8647 - 263ms/epoch - 6ms/step Epoch 83/350 46/46 - 0s - loss: 0.5529 - accuracy: 0.8350 - val_loss: 0.4636 - val_accuracy: 0.8773 - 276ms/epoch - 6ms/step Epoch 84/350 46/46 - 0s - loss: 0.5448 - accuracy: 0.8356 - val_loss: 0.4849 - val_accuracy: 0.8630 - 285ms/epoch - 6ms/step Epoch 85/350 46/46 - 0s - loss: 0.5352 - accuracy: 0.8424 - val_loss: 0.4724 - val_accuracy: 0.8693 - 265ms/epoch - 6ms/step Epoch 86/350 46/46 - 0s - loss: 0.5768 - accuracy: 0.8263 - 
val_loss: 0.4696 - val_accuracy: 0.8657 - 259ms/epoch - 6ms/step Epoch 87/350 46/46 - 0s - loss: 0.5493 - accuracy: 0.8405 - val_loss: 0.5156 - val_accuracy: 0.8677 - 262ms/epoch - 6ms/step Epoch 88/350 46/46 - 0s - loss: 0.5374 - accuracy: 0.8405 - val_loss: 0.4564 - val_accuracy: 0.8753 - 259ms/epoch - 6ms/step Epoch 89/350 46/46 - 0s - loss: 0.5292 - accuracy: 0.8420 - val_loss: 0.4756 - val_accuracy: 0.8690 - 263ms/epoch - 6ms/step Epoch 90/350 46/46 - 0s - loss: 0.5109 - accuracy: 0.8479 - val_loss: 0.4803 - val_accuracy: 0.8663 - 262ms/epoch - 6ms/step Epoch 91/350 46/46 - 0s - loss: 0.4832 - accuracy: 0.8549 - val_loss: 0.4472 - val_accuracy: 0.8770 - 264ms/epoch - 6ms/step Epoch 92/350 46/46 - 0s - loss: 0.5517 - accuracy: 0.8346 - val_loss: 0.4672 - val_accuracy: 0.8733 - 260ms/epoch - 6ms/step Epoch 93/350 46/46 - 0s - loss: 0.5339 - accuracy: 0.8383 - val_loss: 0.4782 - val_accuracy: 0.8707 - 260ms/epoch - 6ms/step Epoch 94/350 46/46 - 0s - loss: 0.5006 - accuracy: 0.8557 - val_loss: 0.4433 - val_accuracy: 0.8763 - 260ms/epoch - 6ms/step Epoch 95/350 46/46 - 0s - loss: 0.5155 - accuracy: 0.8487 - val_loss: 0.4633 - val_accuracy: 0.8743 - 284ms/epoch - 6ms/step Epoch 96/350 46/46 - 0s - loss: 0.5190 - accuracy: 0.8475 - val_loss: 0.4290 - val_accuracy: 0.8823 - 258ms/epoch - 6ms/step Epoch 97/350 46/46 - 0s - loss: 0.5114 - accuracy: 0.8487 - val_loss: 0.4497 - val_accuracy: 0.8727 - 259ms/epoch - 6ms/step Epoch 98/350 46/46 - 0s - loss: 0.4828 - accuracy: 0.8543 - val_loss: 0.4127 - val_accuracy: 0.8863 - 261ms/epoch - 6ms/step Epoch 99/350 46/46 - 0s - loss: 0.4861 - accuracy: 0.8535 - val_loss: 0.4453 - val_accuracy: 0.8800 - 266ms/epoch - 6ms/step Epoch 100/350 46/46 - 0s - loss: 0.4744 - accuracy: 0.8543 - val_loss: 0.5469 - val_accuracy: 0.8510 - 270ms/epoch - 6ms/step Epoch 101/350 46/46 - 0s - loss: 0.4869 - accuracy: 0.8554 - val_loss: 0.4297 - val_accuracy: 0.8847 - 258ms/epoch - 6ms/step Epoch 102/350 46/46 - 0s - loss: 0.4697 - accuracy: 0.8598 - val_loss: 0.4597 - val_accuracy: 0.8810 - 267ms/epoch - 6ms/step Epoch 103/350 46/46 - 0s - loss: 0.4977 - accuracy: 0.8533 - val_loss: 0.4290 - val_accuracy: 0.8850 - 264ms/epoch - 6ms/step Epoch 104/350 46/46 - 0s - loss: 0.4758 - accuracy: 0.8551 - val_loss: 0.4216 - val_accuracy: 0.8890 - 263ms/epoch - 6ms/step Epoch 105/350 46/46 - 0s - loss: 0.4579 - accuracy: 0.8645 - val_loss: 0.4149 - val_accuracy: 0.8870 - 264ms/epoch - 6ms/step Epoch 106/350 46/46 - 0s - loss: 0.4617 - accuracy: 0.8628 - val_loss: 0.4330 - val_accuracy: 0.8837 - 261ms/epoch - 6ms/step Epoch 107/350 46/46 - 0s - loss: 0.5064 - accuracy: 0.8514 - val_loss: 0.4291 - val_accuracy: 0.8870 - 262ms/epoch - 6ms/step Epoch 108/350 46/46 - 0s - loss: 0.4745 - accuracy: 0.8578 - val_loss: 0.4891 - val_accuracy: 0.8703 - 255ms/epoch - 6ms/step Epoch 109/350 46/46 - 0s - loss: 0.4779 - accuracy: 0.8648 - val_loss: 0.4175 - val_accuracy: 0.8890 - 260ms/epoch - 6ms/step Epoch 110/350 46/46 - 0s - loss: 0.4796 - accuracy: 0.8623 - val_loss: 0.4704 - val_accuracy: 0.8693 - 275ms/epoch - 6ms/step Epoch 111/350 46/46 - 0s - loss: 0.4575 - accuracy: 0.8604 - val_loss: 0.4218 - val_accuracy: 0.8867 - 263ms/epoch - 6ms/step Epoch 112/350 46/46 - 0s - loss: 0.4762 - accuracy: 0.8591 - val_loss: 0.4252 - val_accuracy: 0.8847 - 258ms/epoch - 6ms/step Epoch 113/350 46/46 - 0s - loss: 0.4554 - accuracy: 0.8655 - val_loss: 0.4239 - val_accuracy: 0.8840 - 259ms/epoch - 6ms/step Epoch 114/350 46/46 - 0s - loss: 0.4659 - accuracy: 0.8611 - val_loss: 0.4570 - val_accuracy: 0.8803 
- 260ms/epoch - 6ms/step Epoch 115/350 46/46 - 0s - loss: 0.4510 - accuracy: 0.8664 - val_loss: 0.4409 - val_accuracy: 0.8847 - 257ms/epoch - 6ms/step Epoch 116/350 46/46 - 0s - loss: 0.4619 - accuracy: 0.8614 - val_loss: 0.4310 - val_accuracy: 0.8843 - 257ms/epoch - 6ms/step Epoch 117/350 46/46 - 0s - loss: 0.4713 - accuracy: 0.8646 - val_loss: 0.4075 - val_accuracy: 0.8920 - 263ms/epoch - 6ms/step Epoch 118/350 46/46 - 0s - loss: 0.4495 - accuracy: 0.8642 - val_loss: 0.4761 - val_accuracy: 0.8720 - 255ms/epoch - 6ms/step Epoch 119/350 46/46 - 0s - loss: 0.4626 - accuracy: 0.8651 - val_loss: 0.4053 - val_accuracy: 0.8877 - 262ms/epoch - 6ms/step Epoch 120/350 46/46 - 0s - loss: 0.4464 - accuracy: 0.8708 - val_loss: 0.4233 - val_accuracy: 0.8883 - 257ms/epoch - 6ms/step Epoch 121/350 46/46 - 0s - loss: 0.4337 - accuracy: 0.8720 - val_loss: 0.4256 - val_accuracy: 0.8827 - 260ms/epoch - 6ms/step Epoch 122/350 46/46 - 0s - loss: 0.4488 - accuracy: 0.8711 - val_loss: 0.4240 - val_accuracy: 0.8860 - 263ms/epoch - 6ms/step Epoch 123/350 46/46 - 0s - loss: 0.4668 - accuracy: 0.8634 - val_loss: 0.4189 - val_accuracy: 0.8873 - 258ms/epoch - 6ms/step Epoch 124/350 46/46 - 0s - loss: 0.4609 - accuracy: 0.8654 - val_loss: 0.4133 - val_accuracy: 0.8927 - 254ms/epoch - 6ms/step Epoch 125/350 46/46 - 0s - loss: 0.4385 - accuracy: 0.8704 - val_loss: 0.4730 - val_accuracy: 0.8733 - 261ms/epoch - 6ms/step Epoch 126/350 46/46 - 0s - loss: 0.4435 - accuracy: 0.8720 - val_loss: 0.4021 - val_accuracy: 0.8900 - 265ms/epoch - 6ms/step Epoch 127/350 46/46 - 0s - loss: 0.4545 - accuracy: 0.8661 - val_loss: 0.4131 - val_accuracy: 0.8897 - 260ms/epoch - 6ms/step Epoch 128/350 46/46 - 0s - loss: 0.4372 - accuracy: 0.8730 - val_loss: 0.4424 - val_accuracy: 0.8850 - 261ms/epoch - 6ms/step Epoch 129/350 46/46 - 0s - loss: 0.4578 - accuracy: 0.8667 - val_loss: 0.4032 - val_accuracy: 0.8933 - 269ms/epoch - 6ms/step Epoch 130/350 46/46 - 0s - loss: 0.4086 - accuracy: 0.8798 - val_loss: 0.4092 - val_accuracy: 0.8903 - 259ms/epoch - 6ms/step Epoch 131/350 46/46 - 0s - loss: 0.4418 - accuracy: 0.8703 - val_loss: 0.4652 - val_accuracy: 0.8790 - 257ms/epoch - 6ms/step Epoch 132/350 46/46 - 0s - loss: 0.4344 - accuracy: 0.8728 - val_loss: 0.4780 - val_accuracy: 0.8737 - 262ms/epoch - 6ms/step Epoch 133/350 46/46 - 0s - loss: 0.4089 - accuracy: 0.8808 - val_loss: 0.4567 - val_accuracy: 0.8767 - 261ms/epoch - 6ms/step Epoch 134/350 46/46 - 0s - loss: 0.4437 - accuracy: 0.8708 - val_loss: 0.4129 - val_accuracy: 0.8893 - 257ms/epoch - 6ms/step Epoch 135/350 46/46 - 0s - loss: 0.4421 - accuracy: 0.8726 - val_loss: 0.4518 - val_accuracy: 0.8847 - 257ms/epoch - 6ms/step Epoch 136/350 46/46 - 0s - loss: 0.4155 - accuracy: 0.8799 - val_loss: 0.4168 - val_accuracy: 0.8897 - 262ms/epoch - 6ms/step Epoch 137/350 46/46 - 0s - loss: 0.4062 - accuracy: 0.8810 - val_loss: 0.3879 - val_accuracy: 0.8967 - 260ms/epoch - 6ms/step Epoch 138/350 46/46 - 0s - loss: 0.4131 - accuracy: 0.8795 - val_loss: 0.3896 - val_accuracy: 0.8997 - 261ms/epoch - 6ms/step Epoch 139/350 46/46 - 0s - loss: 0.4178 - accuracy: 0.8802 - val_loss: 0.4348 - val_accuracy: 0.8790 - 257ms/epoch - 6ms/step Epoch 140/350 46/46 - 0s - loss: 0.4340 - accuracy: 0.8780 - val_loss: 0.4241 - val_accuracy: 0.8903 - 260ms/epoch - 6ms/step Epoch 141/350 46/46 - 0s - loss: 0.4030 - accuracy: 0.8831 - val_loss: 0.4326 - val_accuracy: 0.8873 - 259ms/epoch - 6ms/step Epoch 142/350 46/46 - 0s - loss: 0.4193 - accuracy: 0.8756 - val_loss: 0.4401 - val_accuracy: 0.8817 - 256ms/epoch - 6ms/step 
Epoch 143/350 46/46 - 0s - loss: 0.4104 - accuracy: 0.8767 - val_loss: 0.4116 - val_accuracy: 0.8947 - 257ms/epoch - 6ms/step Epoch 144/350 46/46 - 0s - loss: 0.4114 - accuracy: 0.8808 - val_loss: 0.4063 - val_accuracy: 0.8890 - 257ms/epoch - 6ms/step Epoch 145/350 46/46 - 0s - loss: 0.4200 - accuracy: 0.8751 - val_loss: 0.4228 - val_accuracy: 0.8960 - 257ms/epoch - 6ms/step Epoch 146/350 46/46 - 0s - loss: 0.4193 - accuracy: 0.8764 - val_loss: 0.4261 - val_accuracy: 0.8900 - 258ms/epoch - 6ms/step Epoch 147/350 46/46 - 0s - loss: 0.4083 - accuracy: 0.8785 - val_loss: 0.3998 - val_accuracy: 0.8960 - 263ms/epoch - 6ms/step Epoch 148/350 46/46 - 0s - loss: 0.4102 - accuracy: 0.8794 - val_loss: 0.4381 - val_accuracy: 0.8810 - 260ms/epoch - 6ms/step Epoch 149/350 46/46 - 0s - loss: 0.4371 - accuracy: 0.8769 - val_loss: 0.4038 - val_accuracy: 0.8947 - 277ms/epoch - 6ms/step Epoch 150/350 46/46 - 0s - loss: 0.4210 - accuracy: 0.8743 - val_loss: 0.4274 - val_accuracy: 0.8833 - 261ms/epoch - 6ms/step Epoch 151/350 46/46 - 0s - loss: 0.4152 - accuracy: 0.8792 - val_loss: 0.4303 - val_accuracy: 0.8847 - 269ms/epoch - 6ms/step Epoch 152/350 46/46 - 0s - loss: 0.4750 - accuracy: 0.8621 - val_loss: 0.4072 - val_accuracy: 0.8903 - 263ms/epoch - 6ms/step Epoch 153/350 46/46 - 0s - loss: 0.4174 - accuracy: 0.8772 - val_loss: 0.4040 - val_accuracy: 0.8947 - 263ms/epoch - 6ms/step Epoch 154/350 46/46 - 0s - loss: 0.4090 - accuracy: 0.8798 - val_loss: 0.4877 - val_accuracy: 0.8703 - 260ms/epoch - 6ms/step Epoch 155/350 46/46 - 0s - loss: 0.4592 - accuracy: 0.8649 - val_loss: 0.4289 - val_accuracy: 0.8813 - 266ms/epoch - 6ms/step Epoch 156/350 46/46 - 0s - loss: 0.4070 - accuracy: 0.8825 - val_loss: 0.4091 - val_accuracy: 0.8923 - 261ms/epoch - 6ms/step Epoch 157/350 46/46 - 0s - loss: 0.4106 - accuracy: 0.8776 - val_loss: 0.4105 - val_accuracy: 0.8923 - 270ms/epoch - 6ms/step Epoch 158/350 46/46 - 0s - loss: 0.3932 - accuracy: 0.8871 - val_loss: 0.3968 - val_accuracy: 0.8937 - 263ms/epoch - 6ms/step Epoch 159/350 46/46 - 0s - loss: 0.3945 - accuracy: 0.8846 - val_loss: 0.3977 - val_accuracy: 0.8943 - 269ms/epoch - 6ms/step Epoch 160/350 46/46 - 0s - loss: 0.3881 - accuracy: 0.8893 - val_loss: 0.4059 - val_accuracy: 0.8867 - 262ms/epoch - 6ms/step Epoch 161/350 46/46 - 0s - loss: 0.3781 - accuracy: 0.8846 - val_loss: 0.3824 - val_accuracy: 0.9060 - 266ms/epoch - 6ms/step Epoch 162/350 46/46 - 0s - loss: 0.3959 - accuracy: 0.8859 - val_loss: 0.4257 - val_accuracy: 0.8903 - 259ms/epoch - 6ms/step Epoch 163/350 46/46 - 0s - loss: 0.3914 - accuracy: 0.8888 - val_loss: 0.4240 - val_accuracy: 0.8853 - 261ms/epoch - 6ms/step Epoch 164/350 46/46 - 0s - loss: 0.3868 - accuracy: 0.8905 - val_loss: 0.3882 - val_accuracy: 0.8983 - 264ms/epoch - 6ms/step Epoch 165/350 46/46 - 0s - loss: 0.3931 - accuracy: 0.8865 - val_loss: 0.5230 - val_accuracy: 0.8643 - 267ms/epoch - 6ms/step Epoch 166/350 46/46 - 0s - loss: 0.4029 - accuracy: 0.8808 - val_loss: 0.3885 - val_accuracy: 0.8960 - 270ms/epoch - 6ms/step Epoch 167/350 46/46 - 0s - loss: 0.3794 - accuracy: 0.8897 - val_loss: 0.3853 - val_accuracy: 0.9027 - 267ms/epoch - 6ms/step Epoch 168/350 46/46 - 0s - loss: 0.3729 - accuracy: 0.8919 - val_loss: 0.3798 - val_accuracy: 0.9077 - 272ms/epoch - 6ms/step Epoch 169/350 46/46 - 0s - loss: 0.3757 - accuracy: 0.8924 - val_loss: 0.4181 - val_accuracy: 0.8920 - 271ms/epoch - 6ms/step Epoch 170/350 46/46 - 0s - loss: 0.3822 - accuracy: 0.8866 - val_loss: 0.4021 - val_accuracy: 0.8960 - 271ms/epoch - 6ms/step Epoch 171/350 46/46 - 0s - 
loss: 0.3655 - accuracy: 0.8947 - val_loss: 0.3848 - val_accuracy: 0.8950 - 274ms/epoch - 6ms/step Epoch 172/350 46/46 - 0s - loss: 0.3708 - accuracy: 0.8875 - val_loss: 0.4120 - val_accuracy: 0.8980 - 279ms/epoch - 6ms/step Epoch 173/350 46/46 - 0s - loss: 0.3780 - accuracy: 0.8909 - val_loss: 0.4001 - val_accuracy: 0.9010 - 275ms/epoch - 6ms/step Epoch 174/350 46/46 - 0s - loss: 0.3683 - accuracy: 0.8931 - val_loss: 0.4558 - val_accuracy: 0.8863 - 268ms/epoch - 6ms/step Epoch 175/350 46/46 - 0s - loss: 0.4167 - accuracy: 0.8790 - val_loss: 0.3846 - val_accuracy: 0.9007 - 267ms/epoch - 6ms/step Epoch 176/350 46/46 - 0s - loss: 0.3788 - accuracy: 0.8897 - val_loss: 0.4310 - val_accuracy: 0.8877 - 258ms/epoch - 6ms/step Epoch 177/350 46/46 - 0s - loss: 0.3849 - accuracy: 0.8865 - val_loss: 0.3781 - val_accuracy: 0.9000 - 269ms/epoch - 6ms/step Epoch 178/350 46/46 - 0s - loss: 0.4045 - accuracy: 0.8874 - val_loss: 0.4145 - val_accuracy: 0.8930 - 262ms/epoch - 6ms/step Epoch 179/350 46/46 - 0s - loss: 0.3617 - accuracy: 0.8938 - val_loss: 0.3910 - val_accuracy: 0.9027 - 261ms/epoch - 6ms/step Epoch 180/350 46/46 - 0s - loss: 0.4019 - accuracy: 0.8810 - val_loss: 0.3767 - val_accuracy: 0.9013 - 259ms/epoch - 6ms/step Epoch 181/350 46/46 - 0s - loss: 0.3762 - accuracy: 0.8892 - val_loss: 0.4118 - val_accuracy: 0.8883 - 263ms/epoch - 6ms/step Epoch 182/350 46/46 - 0s - loss: 0.3694 - accuracy: 0.8931 - val_loss: 0.3936 - val_accuracy: 0.8940 - 275ms/epoch - 6ms/step Epoch 183/350 46/46 - 0s - loss: 0.3755 - accuracy: 0.8874 - val_loss: 0.4147 - val_accuracy: 0.8900 - 261ms/epoch - 6ms/step Epoch 184/350 46/46 - 0s - loss: 0.3855 - accuracy: 0.8918 - val_loss: 0.3992 - val_accuracy: 0.8940 - 261ms/epoch - 6ms/step Epoch 185/350 46/46 - 0s - loss: 0.3671 - accuracy: 0.8968 - val_loss: 0.3854 - val_accuracy: 0.8983 - 263ms/epoch - 6ms/step Epoch 186/350 46/46 - 0s - loss: 0.3668 - accuracy: 0.8952 - val_loss: 0.3855 - val_accuracy: 0.9003 - 260ms/epoch - 6ms/step Epoch 187/350 46/46 - 0s - loss: 0.3730 - accuracy: 0.8959 - val_loss: 0.3700 - val_accuracy: 0.8987 - 259ms/epoch - 6ms/step Epoch 188/350 46/46 - 0s - loss: 0.3608 - accuracy: 0.8989 - val_loss: 0.3953 - val_accuracy: 0.8953 - 261ms/epoch - 6ms/step Epoch 189/350 46/46 - 0s - loss: 0.3608 - accuracy: 0.8950 - val_loss: 0.3754 - val_accuracy: 0.8997 - 265ms/epoch - 6ms/step Epoch 190/350 46/46 - 0s - loss: 0.3780 - accuracy: 0.8957 - val_loss: 0.3857 - val_accuracy: 0.8943 - 262ms/epoch - 6ms/step Epoch 191/350 46/46 - 0s - loss: 0.3689 - accuracy: 0.8954 - val_loss: 0.4086 - val_accuracy: 0.8923 - 256ms/epoch - 6ms/step Epoch 192/350 46/46 - 0s - loss: 0.3898 - accuracy: 0.8870 - val_loss: 0.4272 - val_accuracy: 0.8917 - 263ms/epoch - 6ms/step Epoch 193/350 46/46 - 0s - loss: 0.3800 - accuracy: 0.8897 - val_loss: 0.3914 - val_accuracy: 0.8937 - 263ms/epoch - 6ms/step Epoch 194/350 46/46 - 0s - loss: 0.3879 - accuracy: 0.8889 - val_loss: 0.3835 - val_accuracy: 0.8993 - 262ms/epoch - 6ms/step Epoch 195/350 46/46 - 0s - loss: 0.3633 - accuracy: 0.8961 - val_loss: 0.3742 - val_accuracy: 0.9030 - 261ms/epoch - 6ms/step Epoch 196/350 46/46 - 0s - loss: 0.3710 - accuracy: 0.8955 - val_loss: 0.3768 - val_accuracy: 0.9020 - 263ms/epoch - 6ms/step Epoch 197/350 46/46 - 0s - loss: 0.3688 - accuracy: 0.8910 - val_loss: 0.3625 - val_accuracy: 0.9103 - 260ms/epoch - 6ms/step Epoch 198/350 46/46 - 0s - loss: 0.3612 - accuracy: 0.8960 - val_loss: 0.4077 - val_accuracy: 0.8930 - 263ms/epoch - 6ms/step Epoch 199/350 46/46 - 0s - loss: 0.3702 - accuracy: 
0.8949 - val_loss: 0.4507 - val_accuracy: 0.8807 - 273ms/epoch - 6ms/step Epoch 200/350 46/46 - 0s - loss: 0.3700 - accuracy: 0.8924 - val_loss: 0.3987 - val_accuracy: 0.8943 - 265ms/epoch - 6ms/step Epoch 201/350 46/46 - 0s - loss: 0.3265 - accuracy: 0.9053 - val_loss: 0.3953 - val_accuracy: 0.8987 - 258ms/epoch - 6ms/step Epoch 202/350 46/46 - 0s - loss: 0.3349 - accuracy: 0.9039 - val_loss: 0.3983 - val_accuracy: 0.8963 - 278ms/epoch - 6ms/step Epoch 203/350 46/46 - 0s - loss: 0.3364 - accuracy: 0.9035 - val_loss: 0.3795 - val_accuracy: 0.9060 - 288ms/epoch - 6ms/step Epoch 204/350 46/46 - 0s - loss: 0.3433 - accuracy: 0.8985 - val_loss: 0.4432 - val_accuracy: 0.8850 - 279ms/epoch - 6ms/step Epoch 205/350 46/46 - 0s - loss: 0.3537 - accuracy: 0.8954 - val_loss: 0.3842 - val_accuracy: 0.9037 - 267ms/epoch - 6ms/step Epoch 206/350 46/46 - 0s - loss: 0.3829 - accuracy: 0.8920 - val_loss: 0.4297 - val_accuracy: 0.8910 - 256ms/epoch - 6ms/step Epoch 207/350 46/46 - 0s - loss: 0.3404 - accuracy: 0.9027 - val_loss: 0.3962 - val_accuracy: 0.8913 - 262ms/epoch - 6ms/step Epoch 208/350 46/46 - 0s - loss: 0.3508 - accuracy: 0.8978 - val_loss: 0.4120 - val_accuracy: 0.8873 - 260ms/epoch - 6ms/step Epoch 209/350 46/46 - 0s - loss: 0.3404 - accuracy: 0.9002 - val_loss: 0.3919 - val_accuracy: 0.9033 - 262ms/epoch - 6ms/step Epoch 210/350 46/46 - 0s - loss: 0.3439 - accuracy: 0.9016 - val_loss: 0.4032 - val_accuracy: 0.8983 - 259ms/epoch - 6ms/step Epoch 211/350 46/46 - 0s - loss: 0.3387 - accuracy: 0.9000 - val_loss: 0.3702 - val_accuracy: 0.9003 - 265ms/epoch - 6ms/step Epoch 212/350 46/46 - 0s - loss: 0.3378 - accuracy: 0.9016 - val_loss: 0.3699 - val_accuracy: 0.9080 - 281ms/epoch - 6ms/step Epoch 213/350 46/46 - 0s - loss: 0.3760 - accuracy: 0.8876 - val_loss: 0.3798 - val_accuracy: 0.9013 - 302ms/epoch - 7ms/step Epoch 214/350 46/46 - 0s - loss: 0.3567 - accuracy: 0.8985 - val_loss: 0.4050 - val_accuracy: 0.8977 - 288ms/epoch - 6ms/step Epoch 215/350 46/46 - 0s - loss: 0.3738 - accuracy: 0.8888 - val_loss: 0.3724 - val_accuracy: 0.9033 - 279ms/epoch - 6ms/step Epoch 216/350 46/46 - 0s - loss: 0.3515 - accuracy: 0.8947 - val_loss: 0.3856 - val_accuracy: 0.9037 - 279ms/epoch - 6ms/step Epoch 217/350 46/46 - 0s - loss: 0.3556 - accuracy: 0.8994 - val_loss: 0.3873 - val_accuracy: 0.8977 - 276ms/epoch - 6ms/step Epoch 218/350 46/46 - 0s - loss: 0.3481 - accuracy: 0.9035 - val_loss: 0.3777 - val_accuracy: 0.9000 - 276ms/epoch - 6ms/step Epoch 219/350 46/46 - 0s - loss: 0.3438 - accuracy: 0.8994 - val_loss: 0.3655 - val_accuracy: 0.9017 - 274ms/epoch - 6ms/step Epoch 220/350 46/46 - 0s - loss: 0.3600 - accuracy: 0.8930 - val_loss: 0.3886 - val_accuracy: 0.8990 - 275ms/epoch - 6ms/step Epoch 221/350 46/46 - 0s - loss: 0.3421 - accuracy: 0.9049 - val_loss: 0.3601 - val_accuracy: 0.9077 - 263ms/epoch - 6ms/step Epoch 222/350 46/46 - 0s - loss: 0.3410 - accuracy: 0.9052 - val_loss: 0.4269 - val_accuracy: 0.8900 - 267ms/epoch - 6ms/step Epoch 223/350 46/46 - 0s - loss: 0.3515 - accuracy: 0.8972 - val_loss: 0.3626 - val_accuracy: 0.9030 - 261ms/epoch - 6ms/step Epoch 224/350 46/46 - 0s - loss: 0.3241 - accuracy: 0.9051 - val_loss: 0.3719 - val_accuracy: 0.9033 - 262ms/epoch - 6ms/step Epoch 225/350 46/46 - 0s - loss: 0.3269 - accuracy: 0.9106 - val_loss: 0.3627 - val_accuracy: 0.9103 - 267ms/epoch - 6ms/step Epoch 226/350 46/46 - 0s - loss: 0.3328 - accuracy: 0.9033 - val_loss: 0.4032 - val_accuracy: 0.8977 - 262ms/epoch - 6ms/step Epoch 227/350 46/46 - 0s - loss: 0.3397 - accuracy: 0.9023 - val_loss: 0.3810 
- val_accuracy: 0.9077 - 261ms/epoch - 6ms/step Epoch 228/350 46/46 - 0s - loss: 0.3159 - accuracy: 0.9088 - val_loss: 0.3553 - val_accuracy: 0.9113 - 258ms/epoch - 6ms/step Epoch 229/350 46/46 - 0s - loss: 0.3325 - accuracy: 0.9078 - val_loss: 0.3678 - val_accuracy: 0.9010 - 264ms/epoch - 6ms/step Epoch 230/350 46/46 - 0s - loss: 0.3358 - accuracy: 0.9075 - val_loss: 0.3899 - val_accuracy: 0.8980 - 257ms/epoch - 6ms/step Epoch 231/350 46/46 - 0s - loss: 0.3176 - accuracy: 0.9072 - val_loss: 0.4139 - val_accuracy: 0.8993 - 268ms/epoch - 6ms/step Epoch 232/350 46/46 - 0s - loss: 0.3359 - accuracy: 0.9047 - val_loss: 0.3913 - val_accuracy: 0.9030 - 260ms/epoch - 6ms/step Epoch 233/350 46/46 - 0s - loss: 0.3417 - accuracy: 0.9044 - val_loss: 0.3848 - val_accuracy: 0.9037 - 259ms/epoch - 6ms/step Epoch 234/350 46/46 - 0s - loss: 0.3360 - accuracy: 0.9057 - val_loss: 0.3813 - val_accuracy: 0.9003 - 261ms/epoch - 6ms/step Epoch 235/350 46/46 - 0s - loss: 0.3346 - accuracy: 0.9030 - val_loss: 0.3618 - val_accuracy: 0.9040 - 261ms/epoch - 6ms/step Epoch 236/350 46/46 - 0s - loss: 0.3280 - accuracy: 0.9074 - val_loss: 0.4216 - val_accuracy: 0.8887 - 262ms/epoch - 6ms/step Epoch 237/350 46/46 - 0s - loss: 0.3229 - accuracy: 0.9065 - val_loss: 0.3657 - val_accuracy: 0.9083 - 260ms/epoch - 6ms/step Epoch 238/350 46/46 - 0s - loss: 0.3674 - accuracy: 0.8998 - val_loss: 0.3723 - val_accuracy: 0.9083 - 273ms/epoch - 6ms/step Epoch 239/350 46/46 - 0s - loss: 0.3313 - accuracy: 0.9016 - val_loss: 0.3841 - val_accuracy: 0.9063 - 263ms/epoch - 6ms/step Epoch 240/350 46/46 - 0s - loss: 0.3194 - accuracy: 0.9091 - val_loss: 0.3889 - val_accuracy: 0.9010 - 270ms/epoch - 6ms/step Epoch 241/350 46/46 - 0s - loss: 0.4101 - accuracy: 0.8839 - val_loss: 0.6152 - val_accuracy: 0.8380 - 267ms/epoch - 6ms/step Epoch 242/350 46/46 - 0s - loss: 0.5220 - accuracy: 0.8546 - val_loss: 0.3929 - val_accuracy: 0.8980 - 263ms/epoch - 6ms/step Epoch 243/350 46/46 - 0s - loss: 0.3372 - accuracy: 0.9012 - val_loss: 0.3458 - val_accuracy: 0.9133 - 258ms/epoch - 6ms/step Epoch 244/350 46/46 - 0s - loss: 0.3378 - accuracy: 0.9055 - val_loss: 0.3879 - val_accuracy: 0.9040 - 266ms/epoch - 6ms/step Epoch 245/350 46/46 - 0s - loss: 0.3195 - accuracy: 0.9086 - val_loss: 0.3414 - val_accuracy: 0.9150 - 266ms/epoch - 6ms/step Epoch 246/350 46/46 - 0s - loss: 0.3279 - accuracy: 0.9089 - val_loss: 0.3444 - val_accuracy: 0.9117 - 257ms/epoch - 6ms/step Epoch 247/350 46/46 - 0s - loss: 0.3039 - accuracy: 0.9144 - val_loss: 0.3367 - val_accuracy: 0.9110 - 259ms/epoch - 6ms/step Epoch 248/350 46/46 - 0s - loss: 0.3368 - accuracy: 0.9005 - val_loss: 0.3589 - val_accuracy: 0.9060 - 259ms/epoch - 6ms/step Epoch 249/350 46/46 - 0s - loss: 0.3056 - accuracy: 0.9154 - val_loss: 0.3740 - val_accuracy: 0.9027 - 280ms/epoch - 6ms/step Epoch 250/350 46/46 - 0s - loss: 0.3057 - accuracy: 0.9148 - val_loss: 0.3662 - val_accuracy: 0.9073 - 275ms/epoch - 6ms/step Epoch 251/350 46/46 - 0s - loss: 0.3145 - accuracy: 0.9118 - val_loss: 0.3566 - val_accuracy: 0.9087 - 272ms/epoch - 6ms/step Epoch 252/350 46/46 - 0s - loss: 0.3093 - accuracy: 0.9086 - val_loss: 0.3572 - val_accuracy: 0.9090 - 267ms/epoch - 6ms/step Epoch 253/350 46/46 - 0s - loss: 0.3183 - accuracy: 0.9060 - val_loss: 0.3503 - val_accuracy: 0.9100 - 261ms/epoch - 6ms/step Epoch 254/350 46/46 - 0s - loss: 0.3530 - accuracy: 0.8923 - val_loss: 0.3483 - val_accuracy: 0.9097 - 258ms/epoch - 6ms/step Epoch 255/350 46/46 - 0s - loss: 0.3117 - accuracy: 0.9121 - val_loss: 0.3854 - val_accuracy: 0.9057 - 
268ms/epoch - 6ms/step Epoch 256/350 46/46 - 0s - loss: 0.3757 - accuracy: 0.8947 - val_loss: 0.3887 - val_accuracy: 0.8977 - 265ms/epoch - 6ms/step Epoch 257/350 46/46 - 0s - loss: 0.3284 - accuracy: 0.9044 - val_loss: 0.3682 - val_accuracy: 0.9040 - 259ms/epoch - 6ms/step Epoch 258/350 46/46 - 0s - loss: 0.3222 - accuracy: 0.9099 - val_loss: 0.3403 - val_accuracy: 0.9060 - 265ms/epoch - 6ms/step Epoch 259/350 46/46 - 0s - loss: 0.3464 - accuracy: 0.9027 - val_loss: 0.3900 - val_accuracy: 0.9000 - 275ms/epoch - 6ms/step Epoch 260/350 46/46 - 0s - loss: 0.3659 - accuracy: 0.8980 - val_loss: 0.3624 - val_accuracy: 0.9030 - 271ms/epoch - 6ms/step Epoch 261/350 46/46 - 0s - loss: 0.3182 - accuracy: 0.9085 - val_loss: 0.3900 - val_accuracy: 0.9030 - 265ms/epoch - 6ms/step Epoch 262/350 46/46 - 0s - loss: 0.3093 - accuracy: 0.9112 - val_loss: 0.3533 - val_accuracy: 0.9120 - 258ms/epoch - 6ms/step Epoch 263/350 46/46 - 0s - loss: 0.3268 - accuracy: 0.9083 - val_loss: 0.3799 - val_accuracy: 0.9050 - 265ms/epoch - 6ms/step Epoch 264/350 46/46 - 0s - loss: 0.3139 - accuracy: 0.9153 - val_loss: 0.3485 - val_accuracy: 0.9113 - 262ms/epoch - 6ms/step Epoch 265/350 46/46 - 0s - loss: 0.2892 - accuracy: 0.9174 - val_loss: 0.3625 - val_accuracy: 0.9063 - 262ms/epoch - 6ms/step Epoch 266/350 46/46 - 0s - loss: 0.3306 - accuracy: 0.9055 - val_loss: 0.3527 - val_accuracy: 0.9140 - 260ms/epoch - 6ms/step Epoch 267/350 46/46 - 0s - loss: 0.2809 - accuracy: 0.9195 - val_loss: 0.3587 - val_accuracy: 0.9140 - 262ms/epoch - 6ms/step Epoch 268/350 46/46 - 0s - loss: 0.2985 - accuracy: 0.9092 - val_loss: 0.3297 - val_accuracy: 0.9150 - 261ms/epoch - 6ms/step Epoch 269/350 46/46 - 0s - loss: 0.3032 - accuracy: 0.9148 - val_loss: 0.3547 - val_accuracy: 0.9103 - 263ms/epoch - 6ms/step Epoch 270/350 46/46 - 0s - loss: 0.3013 - accuracy: 0.9089 - val_loss: 0.3385 - val_accuracy: 0.9120 - 268ms/epoch - 6ms/step Epoch 271/350 46/46 - 0s - loss: 0.2946 - accuracy: 0.9155 - val_loss: 0.3516 - val_accuracy: 0.9133 - 267ms/epoch - 6ms/step Epoch 272/350 46/46 - 0s - loss: 0.3532 - accuracy: 0.9029 - val_loss: 0.3933 - val_accuracy: 0.8857 - 265ms/epoch - 6ms/step Epoch 273/350 46/46 - 0s - loss: 0.3326 - accuracy: 0.9036 - val_loss: 0.3711 - val_accuracy: 0.9053 - 270ms/epoch - 6ms/step Epoch 274/350 46/46 - 0s - loss: 0.3135 - accuracy: 0.9097 - val_loss: 0.3424 - val_accuracy: 0.9163 - 264ms/epoch - 6ms/step Epoch 275/350 46/46 - 0s - loss: 0.3006 - accuracy: 0.9174 - val_loss: 0.3435 - val_accuracy: 0.9157 - 267ms/epoch - 6ms/step Epoch 276/350 46/46 - 0s - loss: 0.3048 - accuracy: 0.9193 - val_loss: 0.3509 - val_accuracy: 0.9143 - 276ms/epoch - 6ms/step Epoch 277/350 46/46 - 0s - loss: 0.2998 - accuracy: 0.9145 - val_loss: 0.3431 - val_accuracy: 0.9130 - 286ms/epoch - 6ms/step Epoch 278/350 46/46 - 0s - loss: 0.3434 - accuracy: 0.9049 - val_loss: 0.3591 - val_accuracy: 0.9013 - 279ms/epoch - 6ms/step Epoch 279/350 46/46 - 0s - loss: 0.3039 - accuracy: 0.9142 - val_loss: 0.3287 - val_accuracy: 0.9190 - 273ms/epoch - 6ms/step Epoch 280/350 46/46 - 0s - loss: 0.3178 - accuracy: 0.9089 - val_loss: 0.3592 - val_accuracy: 0.9063 - 271ms/epoch - 6ms/step Epoch 281/350 46/46 - 0s - loss: 0.3170 - accuracy: 0.9097 - val_loss: 0.4152 - val_accuracy: 0.8907 - 283ms/epoch - 6ms/step Epoch 282/350 46/46 - 0s - loss: 0.3212 - accuracy: 0.9045 - val_loss: 0.3838 - val_accuracy: 0.8987 - 274ms/epoch - 6ms/step Epoch 283/350 46/46 - 0s - loss: 0.3076 - accuracy: 0.9133 - val_loss: 0.3732 - val_accuracy: 0.9073 - 276ms/epoch - 6ms/step 
Epoch 284/350 46/46 - 0s - loss: 0.3003 - accuracy: 0.9149 - val_loss: 0.3534 - val_accuracy: 0.9093 - 283ms/epoch - 6ms/step Epoch 285/350 46/46 - 0s - loss: 0.3165 - accuracy: 0.9122 - val_loss: 0.3721 - val_accuracy: 0.9073 - 277ms/epoch - 6ms/step Epoch 286/350 46/46 - 0s - loss: 0.3258 - accuracy: 0.9085 - val_loss: 0.3428 - val_accuracy: 0.9057 - 283ms/epoch - 6ms/step Epoch 287/350 46/46 - 0s - loss: 0.3177 - accuracy: 0.9103 - val_loss: 0.3427 - val_accuracy: 0.9137 - 269ms/epoch - 6ms/step Epoch 288/350 46/46 - 0s - loss: 0.3092 - accuracy: 0.9103 - val_loss: 0.3497 - val_accuracy: 0.9087 - 266ms/epoch - 6ms/step Epoch 289/350 46/46 - 0s - loss: 0.2852 - accuracy: 0.9173 - val_loss: 0.3583 - val_accuracy: 0.9117 - 264ms/epoch - 6ms/step Epoch 290/350 46/46 - 0s - loss: 0.3169 - accuracy: 0.9111 - val_loss: 0.4464 - val_accuracy: 0.8927 - 260ms/epoch - 6ms/step Epoch 291/350 46/46 - 0s - loss: 0.3706 - accuracy: 0.8947 - val_loss: 0.4179 - val_accuracy: 0.8903 - 261ms/epoch - 6ms/step Epoch 292/350 46/46 - 0s - loss: 0.2940 - accuracy: 0.9143 - val_loss: 0.3323 - val_accuracy: 0.9183 - 261ms/epoch - 6ms/step Epoch 293/350 46/46 - 0s - loss: 0.2941 - accuracy: 0.9163 - val_loss: 0.3728 - val_accuracy: 0.9063 - 281ms/epoch - 6ms/step Epoch 294/350 46/46 - 0s - loss: 0.3057 - accuracy: 0.9139 - val_loss: 0.3523 - val_accuracy: 0.9100 - 269ms/epoch - 6ms/step Epoch 295/350 46/46 - 0s - loss: 0.2778 - accuracy: 0.9215 - val_loss: 0.3296 - val_accuracy: 0.9190 - 261ms/epoch - 6ms/step Epoch 296/350 46/46 - 0s - loss: 0.2933 - accuracy: 0.9154 - val_loss: 0.3755 - val_accuracy: 0.9107 - 273ms/epoch - 6ms/step Epoch 297/350 46/46 - 0s - loss: 0.3672 - accuracy: 0.9013 - val_loss: 0.3507 - val_accuracy: 0.9107 - 276ms/epoch - 6ms/step Epoch 298/350 46/46 - 0s - loss: 0.3148 - accuracy: 0.9093 - val_loss: 0.3529 - val_accuracy: 0.9117 - 272ms/epoch - 6ms/step Epoch 299/350 46/46 - 0s - loss: 0.3007 - accuracy: 0.9134 - val_loss: 0.3620 - val_accuracy: 0.9037 - 271ms/epoch - 6ms/step Epoch 300/350 46/46 - 0s - loss: 0.3003 - accuracy: 0.9140 - val_loss: 0.3841 - val_accuracy: 0.9057 - 262ms/epoch - 6ms/step Epoch 301/350 46/46 - 0s - loss: 0.2893 - accuracy: 0.9233 - val_loss: 0.3465 - val_accuracy: 0.9147 - 260ms/epoch - 6ms/step Epoch 302/350 46/46 - 0s - loss: 0.2935 - accuracy: 0.9171 - val_loss: 0.3743 - val_accuracy: 0.8987 - 262ms/epoch - 6ms/step Epoch 303/350 46/46 - 0s - loss: 0.3661 - accuracy: 0.8961 - val_loss: 0.3390 - val_accuracy: 0.9127 - 266ms/epoch - 6ms/step Epoch 304/350 46/46 - 0s - loss: 0.3069 - accuracy: 0.9161 - val_loss: 0.3402 - val_accuracy: 0.9090 - 261ms/epoch - 6ms/step Epoch 305/350 46/46 - 0s - loss: 0.3099 - accuracy: 0.9127 - val_loss: 0.3410 - val_accuracy: 0.9147 - 267ms/epoch - 6ms/step Epoch 306/350 46/46 - 0s - loss: 0.3101 - accuracy: 0.9139 - val_loss: 0.3579 - val_accuracy: 0.9090 - 272ms/epoch - 6ms/step Epoch 307/350 46/46 - 0s - loss: 0.3227 - accuracy: 0.9098 - val_loss: 0.3440 - val_accuracy: 0.9113 - 270ms/epoch - 6ms/step Epoch 308/350 46/46 - 0s - loss: 0.2812 - accuracy: 0.9188 - val_loss: 0.3942 - val_accuracy: 0.9020 - 267ms/epoch - 6ms/step Epoch 309/350 46/46 - 0s - loss: 0.2740 - accuracy: 0.9237 - val_loss: 0.3571 - val_accuracy: 0.9117 - 270ms/epoch - 6ms/step Epoch 310/350 46/46 - 0s - loss: 0.2968 - accuracy: 0.9181 - val_loss: 0.3553 - val_accuracy: 0.9127 - 269ms/epoch - 6ms/step Epoch 311/350 46/46 - 0s - loss: 0.2913 - accuracy: 0.9187 - val_loss: 0.3420 - val_accuracy: 0.9123 - 268ms/epoch - 6ms/step Epoch 312/350 46/46 - 0s - 
loss: 0.2984 - accuracy: 0.9167 - val_loss: 0.3184 - val_accuracy: 0.9153 - 270ms/epoch - 6ms/step Epoch 313/350 46/46 - 0s - loss: 0.2954 - accuracy: 0.9174 - val_loss: 0.3269 - val_accuracy: 0.9157 - 275ms/epoch - 6ms/step Epoch 314/350 46/46 - 0s - loss: 0.2934 - accuracy: 0.9184 - val_loss: 0.3558 - val_accuracy: 0.9103 - 268ms/epoch - 6ms/step Epoch 315/350 46/46 - 0s - loss: 0.2781 - accuracy: 0.9189 - val_loss: 0.3498 - val_accuracy: 0.9080 - 262ms/epoch - 6ms/step Epoch 316/350 46/46 - 0s - loss: 0.2737 - accuracy: 0.9208 - val_loss: 0.3501 - val_accuracy: 0.9137 - 263ms/epoch - 6ms/step Epoch 317/350 46/46 - 0s - loss: 0.2602 - accuracy: 0.9270 - val_loss: 0.3427 - val_accuracy: 0.9137 - 262ms/epoch - 6ms/step Epoch 318/350 46/46 - 0s - loss: 0.2839 - accuracy: 0.9201 - val_loss: 0.3428 - val_accuracy: 0.9130 - 271ms/epoch - 6ms/step Epoch 319/350 46/46 - 0s - loss: 0.2905 - accuracy: 0.9197 - val_loss: 0.3565 - val_accuracy: 0.9073 - 274ms/epoch - 6ms/step Epoch 320/350 46/46 - 0s - loss: 0.3004 - accuracy: 0.9163 - val_loss: 0.3413 - val_accuracy: 0.9113 - 259ms/epoch - 6ms/step Epoch 321/350 46/46 - 0s - loss: 0.3201 - accuracy: 0.9134 - val_loss: 0.3710 - val_accuracy: 0.9040 - 262ms/epoch - 6ms/step Epoch 322/350 46/46 - 0s - loss: 0.3000 - accuracy: 0.9186 - val_loss: 0.3358 - val_accuracy: 0.9167 - 264ms/epoch - 6ms/step Epoch 323/350 46/46 - 0s - loss: 0.2746 - accuracy: 0.9229 - val_loss: 0.3237 - val_accuracy: 0.9170 - 264ms/epoch - 6ms/step Epoch 324/350 46/46 - 0s - loss: 0.2827 - accuracy: 0.9224 - val_loss: 0.3358 - val_accuracy: 0.9123 - 260ms/epoch - 6ms/step Epoch 325/350 46/46 - 0s - loss: 0.2788 - accuracy: 0.9212 - val_loss: 0.3734 - val_accuracy: 0.9020 - 267ms/epoch - 6ms/step Epoch 326/350 46/46 - 0s - loss: 0.2739 - accuracy: 0.9232 - val_loss: 0.3570 - val_accuracy: 0.9110 - 270ms/epoch - 6ms/step Epoch 327/350 46/46 - 0s - loss: 0.2917 - accuracy: 0.9166 - val_loss: 0.3361 - val_accuracy: 0.9157 - 269ms/epoch - 6ms/step Epoch 328/350 46/46 - 0s - loss: 0.2548 - accuracy: 0.9281 - val_loss: 0.3346 - val_accuracy: 0.9187 - 259ms/epoch - 6ms/step Epoch 329/350 46/46 - 0s - loss: 0.2985 - accuracy: 0.9161 - val_loss: 0.4006 - val_accuracy: 0.9007 - 264ms/epoch - 6ms/step Epoch 330/350 46/46 - 0s - loss: 0.3046 - accuracy: 0.9148 - val_loss: 0.3294 - val_accuracy: 0.9167 - 257ms/epoch - 6ms/step Epoch 331/350 46/46 - 0s - loss: 0.2618 - accuracy: 0.9270 - val_loss: 0.3667 - val_accuracy: 0.9090 - 262ms/epoch - 6ms/step Epoch 332/350 46/46 - 0s - loss: 0.3251 - accuracy: 0.9070 - val_loss: 0.3311 - val_accuracy: 0.9150 - 272ms/epoch - 6ms/step Epoch 333/350 46/46 - 0s - loss: 0.2826 - accuracy: 0.9216 - val_loss: 0.3546 - val_accuracy: 0.9103 - 280ms/epoch - 6ms/step Epoch 334/350 46/46 - 0s - loss: 0.2651 - accuracy: 0.9227 - val_loss: 0.3478 - val_accuracy: 0.9140 - 274ms/epoch - 6ms/step Epoch 335/350 46/46 - 0s - loss: 0.2837 - accuracy: 0.9177 - val_loss: 0.3315 - val_accuracy: 0.9153 - 277ms/epoch - 6ms/step Epoch 336/350 46/46 - 0s - loss: 0.2878 - accuracy: 0.9216 - val_loss: 0.3393 - val_accuracy: 0.9143 - 275ms/epoch - 6ms/step Epoch 337/350 46/46 - 0s - loss: 0.2842 - accuracy: 0.9237 - val_loss: 0.3657 - val_accuracy: 0.9087 - 264ms/epoch - 6ms/step Epoch 338/350 46/46 - 0s - loss: 0.2933 - accuracy: 0.9200 - val_loss: 0.3487 - val_accuracy: 0.9223 - 262ms/epoch - 6ms/step Epoch 339/350 46/46 - 0s - loss: 0.2846 - accuracy: 0.9176 - val_loss: 0.3697 - val_accuracy: 0.9113 - 263ms/epoch - 6ms/step Epoch 340/350 46/46 - 0s - loss: 0.2701 - accuracy: 
0.9246 - val_loss: 0.3477 - val_accuracy: 0.9143 - 266ms/epoch - 6ms/step Epoch 341/350 46/46 - 0s - loss: 0.2785 - accuracy: 0.9227 - val_loss: 0.3757 - val_accuracy: 0.9103 - 263ms/epoch - 6ms/step Epoch 342/350 46/46 - 0s - loss: 0.2703 - accuracy: 0.9252 - val_loss: 0.3618 - val_accuracy: 0.9033 - 270ms/epoch - 6ms/step Epoch 343/350 46/46 - 0s - loss: 0.2968 - accuracy: 0.9206 - val_loss: 0.3711 - val_accuracy: 0.9117 - 263ms/epoch - 6ms/step Epoch 344/350 46/46 - 0s - loss: 0.2733 - accuracy: 0.9260 - val_loss: 0.3349 - val_accuracy: 0.9160 - 262ms/epoch - 6ms/step Epoch 345/350 46/46 - 0s - loss: 0.2723 - accuracy: 0.9246 - val_loss: 0.3298 - val_accuracy: 0.9210 - 272ms/epoch - 6ms/step Epoch 346/350 46/46 - 0s - loss: 0.2813 - accuracy: 0.9229 - val_loss: 0.3485 - val_accuracy: 0.9137 - 267ms/epoch - 6ms/step Epoch 347/350 46/46 - 0s - loss: 0.2804 - accuracy: 0.9228 - val_loss: 0.3561 - val_accuracy: 0.9090 - 264ms/epoch - 6ms/step Epoch 348/350 46/46 - 0s - loss: 0.3117 - accuracy: 0.9117 - val_loss: 0.3435 - val_accuracy: 0.9183 - 266ms/epoch - 6ms/step Epoch 349/350 46/46 - 0s - loss: 0.3076 - accuracy: 0.9163 - val_loss: 0.3280 - val_accuracy: 0.9170 - 276ms/epoch - 6ms/step Epoch 350/350 46/46 - 0s - loss: 0.2660 - accuracy: 0.9246 - val_loss: 0.4120 - val_accuracy: 0.9050 - 266ms/epoch - 6ms/step 94/94 - 0s - loss: 0.4120 - accuracy: 0.9050 - 370ms/epoch - 4ms/step Baseline Error: 9.50%
We can see that the model fits the data very well, with over 90% accuracy and very little overfitting. This is a massive improvement over the original model, as can be seen below.
model_31new.save('Goat31Model.h5') #save the model to evaluate on test data later on
df31
| | Model Name | Train Accuracy | Validation Accuracy | Train Loss | Validation Loss | History |
|---|---|---|---|---|---|---|
| 0 | Base 31 Model | 0.780128 | 0.537667 | 0.679253 | 1.505984 | <keras.callbacks.History object at 0x7f5a1785d... |
| 1 | LeNet | 0.728622 | 0.576000 | 0.878840 | 1.332178 | <keras.callbacks.History object at 0x7f5a0c5dd... |
| 2 | AlexNet | 0.105782 | 0.066667 | 2.637130 | 2.785964 | <keras.callbacks.History object at 0x7f5a0c3bb... |
| 3 | RegularizedModel | 0.963779 | 0.805333 | 0.181553 | 1.063044 | <keras.callbacks.History object at 0x7f59ec339... |
| 4 | RegularizedDOModel | 0.870403 | 0.790000 | 0.413026 | 1.056112 | <keras.callbacks.History object at 0x7f59ec0b8... |
| 5 | RegularizedDOModel2 | 0.946500 | 0.799000 | 0.193856 | 1.499866 | <keras.callbacks.History object at 0x7f598a934... |
| 6 | LearningRateDecayModel | 0.797630 | 0.763333 | 0.573027 | 0.902065 | <keras.callbacks.History object at 0x7f5960531... |
| 7 | ExtraRegularReducedModel | 0.868520 | 0.891333 | 0.445079 | 0.408669 | <keras.callbacks.History object at 0x7f59602f6... |
| 8 | NormalizedModel | 0.924568 | 0.905000 | 0.265987 | 0.412032 | <keras.callbacks.History object at 0x7f5971bb2... |
plt.figure(figsize=(15, 8))
for index, row in df31[['Model Name', 'History']].iterrows():
    model_name = row['Model Name']
    history = row['History']
    sns.lineplot(x=range(1, len(history.history['val_accuracy']) + 1), y=history.history['val_accuracy'], label=f'{model_name} Validation')
plt.title('31x31 Model Accuracies Over Epochs')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
plt.figure(figsize=(15, 8))
for index, row in df31[['Model Name', 'History']].iterrows():
    model_name = row['Model Name']
    history = row['History']
    accuracy_diff = [train_acc - val_acc for train_acc, val_acc in zip(history.history['accuracy'], history.history['val_accuracy'])]
    sns.lineplot(x=range(1, len(accuracy_diff) + 1), y=accuracy_diff, label=f'{model_name}')
plt.title('Training Accuracy - Validation Accuracy Difference Over Epochs (31x31)')
plt.xlabel('Epochs')
plt.ylabel('Accuracy Difference (To see overfitting)')
plt.legend()
plt.show()
As we can see, the models with the least overfitting are the one with the extra dropout layers and the one with batch normalization. In terms of consistency, the batch-normalized model looks best: the validation-accuracy graph shows it reaching the highest validation accuracy and converging faster. Before making a final judgement, however, we can evaluate both models on the test set and see which performs better.
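To put a number on the overfitting visible in the difference plot, a quick sketch like the one below (assuming the df31 table above is still in scope) prints the final-epoch gap between training and validation accuracy for each model:
# Sketch: final-epoch train/validation accuracy gap per model (a larger gap suggests more overfitting)
for _, row in df31.iterrows():
    h = row['History'].history
    gap = h['accuracy'][-1] - h['val_accuracy'][-1]
    print(f"{row['Model Name']}: train-val accuracy gap = {gap:.3f}")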
#convert the test set to 31x31
X_test_resized = tf.image.resize(X_test, (31, 31))
X_test31 = X_test_resized.numpy().reshape(X_test_resized.shape[0], 31, 31, 1).astype('float32') / 255
test_labels = to_categorical(y_test)
#load the Model with Batch Normalization
model31_BN = models.load_model('Goat31Model.h5')
#evaluate the model on the test set
score = model31_BN.evaluate(X_test31, test_labels, verbose=0)
print("Test loss:", score[0])
print("Test accuracy:", score[1])
Test loss: 0.3477824330329895 Test accuracy: 0.9086666703224182
#load the other model for comparison
model31_reg = models.load_model('extraregularized31.h5')
#evaluate the model on the test set
score = model31_reg.evaluate(X_test31, test_labels, verbose=0)
print("Test loss:", score[0])
print("Test accuracy:", score[1])
Test loss: 0.39638611674308777 Test accuracy: 0.9003333449363708
Accuracy: the Batch Normalized Model has a higher test accuracy than the Regularized Model (0.909 vs 0.900).
Loss: the Batch Normalized Model also has a lower test loss (0.348 vs 0.396).
The Batch Normalized Model performs better on the test set, with higher accuracy and lower loss than the Regularized Model. Therefore, the Batch Normalized Model is the preferred model of the two.
#Compare the Best model to our base model
base31model = models.load_model('Base31Model.h5')
score = base31model.evaluate(X_test31, test_labels, verbose=0)
print("Test loss:", score[0])
print("Test accuracy:", score[1])
Test loss: 1.5366089344024658 Test accuracy: 0.5286666750907898
As part of the assignment requirements, we need to perform feature engineering and/or data augmentation. We can use data augmentation to balance the imbalanced dataset instead of setting class_weights. We can then train a model on this new "balanced" data and compare the results of setting class_weights against using data augmentation.
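For reference, here is a minimal sketch of the class_weight alternative mentioned above, using scikit-learn's helper; the class_weight dictionary used earlier in this notebook may have been built differently, so treat the variable names and the exact construction as assumptions:
# Sketch: inverse-frequency class weights from the integer labels in y_train
from sklearn.utils.class_weight import compute_class_weight
classes = np.unique(y_train)
weights = compute_class_weight('balanced', classes=classes, y=y_train)
class_weight = dict(zip(classes, weights))  # rare classes receive weights > 1
# later passed as model.fit(..., class_weight=class_weight)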
What is Data Augmentation?
Data augmentation is a technique widely used in machine learning and computer vision to artificially increase the diversity of a training dataset by applying various transformations to the existing data. The goal is to create new variations of the original images while preserving their semantic content. This approach is particularly useful in image classification tasks.
How Does Data Augmentation Work?
Data augmentation involves applying a set of image transformations to the training data, effectively creating new instances that are variations of the original samples. Common transformations include rotations, horizontal and vertical shifts, shears, zooms, and horizontal flips (the same transformations used by the augmentor function below).
Why Use Data Augmentation?
By introducing these variations, the model becomes more robust and generalizes better to unseen data. Data augmentation helps prevent overfitting by exposing the model to a more diverse range of input patterns during training.
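As a side note, the same kinds of transformations can also be expressed with Keras preprocessing layers. The sketch below is purely illustrative (the parameter values are assumptions) and is not the pipeline used in this notebook, which relies on ImageDataGenerator as defined next:
# Sketch: augmentation expressed as Keras preprocessing layers (illustrative alternative only)
augmentation = models.Sequential([
    layers.RandomRotation(0.11),          # roughly +/- 40 degrees, given as a fraction of a full turn
    layers.RandomTranslation(0.2, 0.2),   # height/width shifts of up to 20%
    layers.RandomZoom(0.2),               # zoom in/out by up to 20%
    layers.RandomFlip('horizontal'),      # mirror left-right
])
# augmented = augmentation(x_batch, training=True)  # applies random transforms to a batch of images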
def augmentor(x_batch):
    datagen = ImageDataGenerator(
        rotation_range=40,
        width_shift_range=0.2,
        height_shift_range=0.2,
        shear_range=0.2,
        zoom_range=0.2,
        horizontal_flip=True,
        fill_mode='nearest'
    )
    # Specify the batch_size to match the size of x_batch
    batch_size = len(x_batch)
    x_batch_augmented = datagen.flow(x_batch, shuffle=False, batch_size=batch_size).next()
    return x_batch_augmented
# Test the function on a few of our 31x31 images
x_batch = X_train31[:3]
print(x_batch.shape)
x_batch_augmented = augmentor(x_batch)
# Print the original and augmented images
fig, axes = plt.subplots(2, 3, figsize=(15, 8))
# Original Images
for i in range(3):
    ax = axes[0, i]
    ax.imshow(x_batch[i].reshape(31, 31), cmap='gray')
    ax.axis('off')
# Augmented Images
for i in range(3):
    ax = axes[1, i]
    ax.imshow(x_batch_augmented[i].reshape(31, 31), cmap='gray')
    ax.axis('off')
fig.suptitle('Original (Top) vs Augmented (Bottom) Images', fontsize=16)
plt.show()
(3, 31, 31, 1)
Recall the class distribution from the bar graph we plotted earlier. We can use data augmentation to balance out the number of entries per class.
class_counts
# this is on the old training data
{'Cauliflower': 587,
'Cucumber': 812,
'Broccoli': 750,
'Bean': 780,
'Pumpkin': 814,
'Tomato': 955,
'Papaya': 566,
'Bottle_Gourd': 441,
'Cabbage': 503,
'Brinjal': 868,
'Capsicum': 351,
'Potato': 377,
'Radish': 248,
'Bitter_Gourd': 720,
'Carrot': 256}
As a form of feature engineering, I have removed the hands from the images in the training data. Hopefully this will make the data more "vegetable-like", allowing the models to learn to differentiate the vegetables better.
train_data = tf.keras.utils.image_dataset_from_directory(
'train_aug/',
labels='inferred',
label_mode='int',
class_names=None,
color_mode='grayscale',
batch_size = 200,
image_size=(224, 224),
shuffle=True,
seed=42,
validation_split=None,
subset=None,
interpolation='bilinear',
crop_to_aspect_ratio=False,
)
Found 8807 files belonging to 15 classes.
We can see that the class with the highest number of entries is Tomato with 955. Let us generate a dictionary with the number of entries we need to "generate" for each class in order to balance out the dataset.
# count the number of images per (integer-encoded) class label
class_counts = {}
for images, labels in train_data:
    for label in labels.numpy():
        if label in class_counts:
            class_counts[label] += 1
        else:
            class_counts[label] = 1
class_counts
{14: 955,
2: 420,
0: 741,
4: 736,
1: 701,
9: 750,
10: 563,
12: 811,
6: 346,
11: 373,
5: 501,
13: 247,
3: 826,
8: 581,
7: 256}
augmentation_dict = {class_name: 955 - class_count for class_name, class_count in class_counts.items()}
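To make the arithmetic concrete: with the counts above, class 0 (741 images) needs 955 - 741 = 214 extra augmented images, class 2 (420 images) needs 955 - 420 = 535, and class 14 (already at the maximum of 955) needs none, which matches the per-category counts printed by augmentfill further below.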
Compared to the original training data with hands, our engineered data contains fewer images; however, we can argue that the quality of the data has gone up.
# Convert datasets to NumPy arrays
train_images, train_labels_augmented = zip(*[(image, label) for image, label in train_data.as_numpy_iterator()])
#train set
X_train_aug = np.concatenate(train_images, axis=0)
y_train = np.concatenate(train_labels_augmented, axis=0)
# Resize images to (31, 31) using TensorFlow
X_train_resized = tf.image.resize(X_train_aug, (31, 31))
# Reshape and normalize
X_train31_aug = X_train_resized.numpy().reshape(X_train_resized.shape[0], 31, 31, 1).astype('float32') / 255
def augmentfill(y_train, augmentation_dict, X_train_aug, augmentor=augmentor):
    augmented_data_list = []
    for category_index in np.unique(y_train):
        category_mask = (y_train == category_index)
        selected_images = X_train_aug[category_mask]
        # Ensure the category_index exists in augmentation_dict
        if category_index in augmentation_dict:
            # Check if there are samples in selected_images
            if len(selected_images) > 0:
                target_augmentation_count = augmentation_dict[category_index]
                current_count = 0
                while current_count < target_augmentation_count:
                    # Calculate how many images to take in this iteration
                    images_to_take = min(target_augmentation_count - current_count, len(selected_images))
                    print(images_to_take)
                    x_batch = selected_images[:images_to_take]
                    if len(x_batch) > 0:
                        x_batch_augmented = augmentor(x_batch)
                        augmented_data_list.append((x_batch_augmented, [category_index] * images_to_take))
                        current_count += images_to_take
                        print(f'Augmented {current_count} images out of {target_augmentation_count}')
                    else:
                        print(f'No augmentation needed to fill for {category_index}')
                        break
                print(f'Category {category_index}: Augmented {current_count} images out of {target_augmentation_count}')
            else:
                print(f"No samples for category {category_index}")
        else:
            print(f"No augmentation specified for category {category_index}")
    # Convert the augmented data list to NumPy arrays
    augmented_data_images, augmented_data_labels = zip(*augmented_data_list)
    # Convert augmented_data_labels to a flattened 1D array
    flattened_augmented_labels = np.concatenate(augmented_data_labels, axis=0)
    # Concatenate augmented data with the original data
    X_train31_augmented = np.concatenate([X_train_aug] + list(augmented_data_images), axis=0)
    y_train_augmented = np.concatenate([y_train] + [flattened_augmented_labels], axis=0)
    return X_train31_augmented, y_train_augmented
X_train31_augmented, y_train_augmented = augmentfill(y_train, augmentation_dict, X_train31_aug)
214 Augmented 214 images out of 214 Category 0: Augmented 214 images out of 214 254 Augmented 254 images out of 254 Category 1: Augmented 254 images out of 254 420 Augmented 420 images out of 535 115 Augmented 535 images out of 535 Category 2: Augmented 535 images out of 535 129 Augmented 129 images out of 129 Category 3: Augmented 129 images out of 129 219 Augmented 219 images out of 219 Category 4: Augmented 219 images out of 219 454 Augmented 454 images out of 454 Category 5: Augmented 454 images out of 454 346 Augmented 346 images out of 609 263 Augmented 609 images out of 609 Category 6: Augmented 609 images out of 609 256 Augmented 256 images out of 699 256 Augmented 512 images out of 699 187 Augmented 699 images out of 699 Category 7: Augmented 699 images out of 699 374 Augmented 374 images out of 374 Category 8: Augmented 374 images out of 374 205 Augmented 205 images out of 205 Category 9: Augmented 205 images out of 205 392 Augmented 392 images out of 392 Category 10: Augmented 392 images out of 392 373 Augmented 373 images out of 582 209 Augmented 582 images out of 582 Category 11: Augmented 582 images out of 582 144 Augmented 144 images out of 144 Category 12: Augmented 144 images out of 144 247 Augmented 247 images out of 708 247 Augmented 494 images out of 708 214 Augmented 708 images out of 708 Category 13: Augmented 708 images out of 708 Category 14: Augmented 0 images out of 0
values, counts = np.unique(y_train_augmented, return_counts=True)
print(values)
print(counts)
[ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14] [955 955 955 955 955 955 955 955 955 955 955 955 955 955 955]
train_labels_augmented = to_categorical(y_train_augmented)
We can now start with the modelling. We will train variations of our top model, first on the original data with class weights and then on the augmented, engineered data, and compare the results.
def GoatModel():  # not to be confused with Goat31Model, this is the model without batch norm
    model_31do = models.Sequential([
        layers.Conv2D(32, (3, 3), activation='relu', input_shape=(31, 31, 1)),
        layers.MaxPooling2D((2, 2)),
        layers.Dropout(0.2),
        layers.Conv2D(64, (3, 3), activation='relu'),
        layers.MaxPooling2D((2, 2)),
        layers.Dropout(0.2),
        layers.Conv2D(64, (3, 3), activation='relu'),
        layers.MaxPooling2D((2, 2)),
        layers.Dropout(0.2),
        layers.Flatten(),
        layers.Dense(64, activation='relu', kernel_regularizer=regularizers.l2(0.001)),
        layers.Dropout(0.5),
        layers.Dense(15, activation='softmax')
    ])
    return model_31do
df31augmentationcompare = pd.DataFrame(columns=['Model Name', 'Train Accuracy', 'Validation Accuracy', 'Train Loss', 'Validation Loss', 'History'])
#with just weights
model = GoatModel()
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
history = model.fit(X_train31, train_labels, validation_data=(X_val31, validation_labels),
                    epochs=300, batch_size=200, verbose=2, class_weight=class_weight)
scores = model.evaluate(X_val31, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
df31augmentationcompare.loc[len(df31augmentationcompare)] = ['With Class Weights', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
Epoch 1/300
46/46 - 2s - loss: 3.2633 - accuracy: 0.1007 - val_loss: 2.8381 - val_accuracy: 0.0667 - 2s/epoch - 52ms/step Epoch 2/300 46/46 - 0s - loss: 3.1237 - accuracy: 0.1120 - val_loss: 2.7994 - val_accuracy: 0.0943 - 260ms/epoch - 6ms/step Epoch 3/300 46/46 - 0s - loss: 2.9289 - accuracy: 0.1595 - val_loss: 2.6474 - val_accuracy: 0.1303 - 253ms/epoch - 6ms/step Epoch 4/300 46/46 - 0s - loss: 2.7005 - accuracy: 0.2127 - val_loss: 2.4420 - val_accuracy: 0.1947 - 247ms/epoch - 5ms/step Epoch 5/300 46/46 - 0s - loss: 2.5595 - accuracy: 0.2463 - val_loss: 2.2509 - val_accuracy: 0.2713 - 248ms/epoch - 5ms/step Epoch 6/300 46/46 - 0s - loss: 2.3929 - accuracy: 0.2812 - val_loss: 2.1818 - val_accuracy: 0.2890 - 251ms/epoch - 5ms/step Epoch 7/300 46/46 - 0s - loss: 2.3074 - accuracy: 0.3107 - val_loss: 2.1674 - val_accuracy: 0.3053 - 248ms/epoch - 5ms/step Epoch 8/300 46/46 - 0s - loss: 2.2328 - accuracy: 0.3238 - val_loss: 2.0238 - val_accuracy: 0.3557 - 250ms/epoch - 5ms/step Epoch 9/300 46/46 - 0s - loss: 2.1702 - accuracy: 0.3480 - val_loss: 1.9389 - val_accuracy: 0.3893 - 246ms/epoch - 5ms/step Epoch 10/300 46/46 - 0s - loss: 2.0804 - accuracy: 0.3664 - val_loss: 1.9264 - val_accuracy: 0.3757 - 254ms/epoch - 6ms/step Epoch 11/300 46/46 - 0s - loss: 2.0221 - accuracy: 0.3848 - val_loss: 1.8172 - val_accuracy: 0.4137 - 243ms/epoch - 5ms/step Epoch 12/300 46/46 - 0s - loss: 1.9702 - accuracy: 0.3941 - val_loss: 1.7433 - val_accuracy: 0.4340 - 240ms/epoch - 5ms/step Epoch 13/300 46/46 - 0s - loss: 1.9429 - accuracy: 0.3980 - val_loss: 1.7278 - val_accuracy: 0.4370 - 244ms/epoch - 5ms/step Epoch 14/300 46/46 - 0s - loss: 1.8685 - accuracy: 0.4206 - val_loss: 1.7402 - val_accuracy: 0.4223 - 241ms/epoch - 5ms/step Epoch 15/300 46/46 - 0s - loss: 1.8718 - accuracy: 0.4221 - val_loss: 1.6568 - val_accuracy: 0.4587 - 240ms/epoch - 5ms/step Epoch 16/300 46/46 - 0s - loss: 1.8170 - accuracy: 0.4366 - val_loss: 1.5847 - val_accuracy: 0.4910 - 243ms/epoch - 5ms/step Epoch 17/300 46/46 - 0s - loss: 1.7809 - accuracy: 0.4496 - val_loss: 1.5992 - val_accuracy: 0.4943 - 238ms/epoch - 5ms/step Epoch 18/300 46/46 - 0s - loss: 1.7701 - accuracy: 0.4505 - val_loss: 1.5676 - val_accuracy: 0.5047 - 241ms/epoch - 5ms/step Epoch 19/300 46/46 - 0s - loss: 1.7424 - accuracy: 0.4526 - val_loss: 1.5745 - val_accuracy: 0.4803 - 240ms/epoch - 5ms/step Epoch 20/300 46/46 - 0s - loss: 1.7054 - accuracy: 0.4730 - val_loss: 1.4673 - val_accuracy: 0.5347 - 240ms/epoch - 5ms/step Epoch 21/300 46/46 - 0s - loss: 1.6427 - accuracy: 0.4832 - val_loss: 1.4325 - val_accuracy: 0.5460 - 236ms/epoch - 5ms/step Epoch 22/300 46/46 - 0s - loss: 1.6460 - accuracy: 0.4833 - val_loss: 1.4250 - val_accuracy: 0.5413 - 241ms/epoch - 5ms/step Epoch 23/300 46/46 - 0s - loss: 1.6023 - accuracy: 0.4945 - val_loss: 1.4021 - val_accuracy: 0.5503 - 242ms/epoch - 5ms/step Epoch 24/300 46/46 - 0s - loss: 1.6037 - accuracy: 0.4987 - val_loss: 1.4064 - val_accuracy: 0.5560 - 239ms/epoch - 5ms/step Epoch 25/300 46/46 - 0s - loss: 1.5658 - accuracy: 0.5055 - val_loss: 1.4155 - val_accuracy: 0.5410 - 239ms/epoch - 5ms/step Epoch 26/300 46/46 - 0s - loss: 1.5735 - accuracy: 0.5100 - val_loss: 1.3408 - val_accuracy: 0.5797 - 243ms/epoch - 5ms/step Epoch 27/300 46/46 - 0s - loss: 1.5390 - accuracy: 0.5183 - val_loss: 1.3584 - val_accuracy: 0.5740 - 242ms/epoch - 5ms/step Epoch 28/300 46/46 - 0s - loss: 1.5081 - accuracy: 0.5276 - val_loss: 1.3591 - val_accuracy: 0.5590 - 240ms/epoch - 5ms/step Epoch 29/300 46/46 - 0s - loss: 1.5069 - accuracy: 0.5258 - val_loss: 1.2855 
- val_accuracy: 0.6097 - 238ms/epoch - 5ms/step Epoch 30/300 46/46 - 0s - loss: 1.4766 - accuracy: 0.5407 - val_loss: 1.2636 - val_accuracy: 0.6087 - 242ms/epoch - 5ms/step Epoch 31/300 46/46 - 0s - loss: 1.4698 - accuracy: 0.5394 - val_loss: 1.3152 - val_accuracy: 0.5943 - 252ms/epoch - 5ms/step Epoch 32/300 46/46 - 0s - loss: 1.4684 - accuracy: 0.5381 - val_loss: 1.3086 - val_accuracy: 0.5847 - 254ms/epoch - 6ms/step Epoch 33/300 46/46 - 0s - loss: 1.4574 - accuracy: 0.5450 - val_loss: 1.2305 - val_accuracy: 0.6050 - 259ms/epoch - 6ms/step Epoch 34/300 46/46 - 0s - loss: 1.4373 - accuracy: 0.5505 - val_loss: 1.1955 - val_accuracy: 0.6377 - 291ms/epoch - 6ms/step Epoch 35/300 46/46 - 0s - loss: 1.4157 - accuracy: 0.5573 - val_loss: 1.2175 - val_accuracy: 0.6117 - 271ms/epoch - 6ms/step Epoch 36/300 46/46 - 0s - loss: 1.3824 - accuracy: 0.5691 - val_loss: 1.2042 - val_accuracy: 0.6283 - 272ms/epoch - 6ms/step Epoch 37/300 46/46 - 0s - loss: 1.3953 - accuracy: 0.5713 - val_loss: 1.1963 - val_accuracy: 0.6207 - 252ms/epoch - 5ms/step Epoch 38/300 46/46 - 0s - loss: 1.3868 - accuracy: 0.5671 - val_loss: 1.2475 - val_accuracy: 0.6300 - 259ms/epoch - 6ms/step Epoch 39/300 46/46 - 0s - loss: 1.3602 - accuracy: 0.5749 - val_loss: 1.1489 - val_accuracy: 0.6343 - 253ms/epoch - 6ms/step Epoch 40/300 46/46 - 0s - loss: 1.3477 - accuracy: 0.5755 - val_loss: 1.1954 - val_accuracy: 0.6227 - 245ms/epoch - 5ms/step Epoch 41/300 46/46 - 0s - loss: 1.3312 - accuracy: 0.5831 - val_loss: 1.1082 - val_accuracy: 0.6460 - 255ms/epoch - 6ms/step Epoch 42/300 46/46 - 0s - loss: 1.3150 - accuracy: 0.5843 - val_loss: 1.1305 - val_accuracy: 0.6467 - 249ms/epoch - 5ms/step Epoch 43/300 46/46 - 0s - loss: 1.3277 - accuracy: 0.5879 - val_loss: 1.1050 - val_accuracy: 0.6697 - 240ms/epoch - 5ms/step Epoch 44/300 46/46 - 0s - loss: 1.2894 - accuracy: 0.5950 - val_loss: 1.0945 - val_accuracy: 0.6600 - 236ms/epoch - 5ms/step Epoch 45/300 46/46 - 0s - loss: 1.3236 - accuracy: 0.5879 - val_loss: 1.1060 - val_accuracy: 0.6560 - 238ms/epoch - 5ms/step Epoch 46/300 46/46 - 0s - loss: 1.3017 - accuracy: 0.6043 - val_loss: 1.0384 - val_accuracy: 0.6857 - 248ms/epoch - 5ms/step Epoch 47/300 46/46 - 0s - loss: 1.2585 - accuracy: 0.6095 - val_loss: 1.0386 - val_accuracy: 0.6800 - 241ms/epoch - 5ms/step Epoch 48/300 46/46 - 0s - loss: 1.2733 - accuracy: 0.6062 - val_loss: 1.0364 - val_accuracy: 0.6847 - 238ms/epoch - 5ms/step Epoch 49/300 46/46 - 0s - loss: 1.2632 - accuracy: 0.6038 - val_loss: 1.0420 - val_accuracy: 0.6870 - 241ms/epoch - 5ms/step Epoch 50/300 46/46 - 0s - loss: 1.2482 - accuracy: 0.6135 - val_loss: 1.0327 - val_accuracy: 0.6927 - 242ms/epoch - 5ms/step Epoch 51/300 46/46 - 0s - loss: 1.2764 - accuracy: 0.6117 - val_loss: 1.3639 - val_accuracy: 0.5673 - 251ms/epoch - 5ms/step Epoch 52/300 46/46 - 0s - loss: 1.2663 - accuracy: 0.6070 - val_loss: 0.9944 - val_accuracy: 0.6920 - 242ms/epoch - 5ms/step Epoch 53/300 46/46 - 0s - loss: 1.2330 - accuracy: 0.6194 - val_loss: 0.9808 - val_accuracy: 0.7053 - 245ms/epoch - 5ms/step Epoch 54/300 46/46 - 0s - loss: 1.2001 - accuracy: 0.6275 - val_loss: 0.9920 - val_accuracy: 0.7037 - 245ms/epoch - 5ms/step Epoch 55/300 46/46 - 0s - loss: 1.1951 - accuracy: 0.6251 - val_loss: 1.0116 - val_accuracy: 0.6797 - 243ms/epoch - 5ms/step Epoch 56/300 46/46 - 0s - loss: 1.1617 - accuracy: 0.6449 - val_loss: 0.9666 - val_accuracy: 0.7110 - 245ms/epoch - 5ms/step Epoch 57/300 46/46 - 0s - loss: 1.1791 - accuracy: 0.6346 - val_loss: 1.0042 - val_accuracy: 0.6910 - 244ms/epoch - 5ms/step Epoch 
58/300 46/46 - 0s - loss: 1.1887 - accuracy: 0.6313 - val_loss: 0.9660 - val_accuracy: 0.7160 - 245ms/epoch - 5ms/step Epoch 59/300 46/46 - 0s - loss: 1.1641 - accuracy: 0.6380 - val_loss: 0.9585 - val_accuracy: 0.7037 - 242ms/epoch - 5ms/step Epoch 60/300 46/46 - 0s - loss: 1.1951 - accuracy: 0.6314 - val_loss: 1.0038 - val_accuracy: 0.7090 - 240ms/epoch - 5ms/step Epoch 61/300 46/46 - 0s - loss: 1.1488 - accuracy: 0.6448 - val_loss: 0.9085 - val_accuracy: 0.7260 - 237ms/epoch - 5ms/step Epoch 62/300 46/46 - 0s - loss: 1.1471 - accuracy: 0.6499 - val_loss: 0.9097 - val_accuracy: 0.7293 - 244ms/epoch - 5ms/step Epoch 63/300 46/46 - 0s - loss: 1.1719 - accuracy: 0.6413 - val_loss: 0.9388 - val_accuracy: 0.7227 - 247ms/epoch - 5ms/step Epoch 64/300 46/46 - 0s - loss: 1.1391 - accuracy: 0.6488 - val_loss: 0.9802 - val_accuracy: 0.6997 - 240ms/epoch - 5ms/step Epoch 65/300 46/46 - 0s - loss: 1.1279 - accuracy: 0.6493 - val_loss: 0.8956 - val_accuracy: 0.7283 - 244ms/epoch - 5ms/step Epoch 66/300 46/46 - 0s - loss: 1.1319 - accuracy: 0.6498 - val_loss: 0.9588 - val_accuracy: 0.7063 - 249ms/epoch - 5ms/step Epoch 67/300 46/46 - 0s - loss: 1.1065 - accuracy: 0.6576 - val_loss: 0.9161 - val_accuracy: 0.7210 - 245ms/epoch - 5ms/step Epoch 68/300 46/46 - 0s - loss: 1.1178 - accuracy: 0.6555 - val_loss: 0.8721 - val_accuracy: 0.7387 - 240ms/epoch - 5ms/step Epoch 69/300 46/46 - 0s - loss: 1.0927 - accuracy: 0.6611 - val_loss: 0.8943 - val_accuracy: 0.7343 - 240ms/epoch - 5ms/step Epoch 70/300 46/46 - 0s - loss: 1.0763 - accuracy: 0.6675 - val_loss: 0.9083 - val_accuracy: 0.7203 - 248ms/epoch - 5ms/step Epoch 71/300 46/46 - 0s - loss: 1.0705 - accuracy: 0.6578 - val_loss: 0.8815 - val_accuracy: 0.7443 - 244ms/epoch - 5ms/step Epoch 72/300 46/46 - 0s - loss: 1.0701 - accuracy: 0.6666 - val_loss: 0.8490 - val_accuracy: 0.7503 - 243ms/epoch - 5ms/step Epoch 73/300 46/46 - 0s - loss: 1.0871 - accuracy: 0.6712 - val_loss: 0.8685 - val_accuracy: 0.7393 - 242ms/epoch - 5ms/step Epoch 74/300 46/46 - 0s - loss: 1.0632 - accuracy: 0.6710 - val_loss: 0.8806 - val_accuracy: 0.7360 - 244ms/epoch - 5ms/step Epoch 75/300 46/46 - 0s - loss: 1.0507 - accuracy: 0.6729 - val_loss: 0.9249 - val_accuracy: 0.7200 - 238ms/epoch - 5ms/step Epoch 76/300 46/46 - 0s - loss: 1.1020 - accuracy: 0.6584 - val_loss: 0.8573 - val_accuracy: 0.7420 - 241ms/epoch - 5ms/step Epoch 77/300 46/46 - 0s - loss: 1.0304 - accuracy: 0.6793 - val_loss: 0.8452 - val_accuracy: 0.7480 - 238ms/epoch - 5ms/step Epoch 78/300 46/46 - 0s - loss: 1.0487 - accuracy: 0.6799 - val_loss: 0.8666 - val_accuracy: 0.7433 - 246ms/epoch - 5ms/step Epoch 79/300 46/46 - 0s - loss: 1.0663 - accuracy: 0.6674 - val_loss: 0.8462 - val_accuracy: 0.7493 - 241ms/epoch - 5ms/step Epoch 80/300 46/46 - 0s - loss: 1.0278 - accuracy: 0.6899 - val_loss: 0.8399 - val_accuracy: 0.7523 - 237ms/epoch - 5ms/step Epoch 81/300 46/46 - 0s - loss: 1.0188 - accuracy: 0.6766 - val_loss: 0.8467 - val_accuracy: 0.7363 - 241ms/epoch - 5ms/step Epoch 82/300 46/46 - 0s - loss: 1.0241 - accuracy: 0.6809 - val_loss: 0.9900 - val_accuracy: 0.6900 - 242ms/epoch - 5ms/step Epoch 83/300 46/46 - 0s - loss: 1.0541 - accuracy: 0.6799 - val_loss: 0.8056 - val_accuracy: 0.7530 - 245ms/epoch - 5ms/step Epoch 84/300 46/46 - 0s - loss: 1.0107 - accuracy: 0.6883 - val_loss: 0.8049 - val_accuracy: 0.7567 - 242ms/epoch - 5ms/step Epoch 85/300 46/46 - 0s - loss: 1.0126 - accuracy: 0.6832 - val_loss: 0.7989 - val_accuracy: 0.7567 - 239ms/epoch - 5ms/step Epoch 86/300 46/46 - 0s - loss: 0.9940 - accuracy: 0.6918 - 
val_loss: 0.7892 - val_accuracy: 0.7657 - 242ms/epoch - 5ms/step Epoch 87/300 46/46 - 0s - loss: 0.9943 - accuracy: 0.6954 - val_loss: 0.7946 - val_accuracy: 0.7663 - 244ms/epoch - 5ms/step Epoch 88/300 46/46 - 0s - loss: 1.0075 - accuracy: 0.6827 - val_loss: 0.7905 - val_accuracy: 0.7647 - 239ms/epoch - 5ms/step Epoch 89/300 46/46 - 0s - loss: 1.0159 - accuracy: 0.6936 - val_loss: 0.8179 - val_accuracy: 0.7490 - 237ms/epoch - 5ms/step Epoch 90/300 46/46 - 0s - loss: 1.0025 - accuracy: 0.6901 - val_loss: 0.8132 - val_accuracy: 0.7630 - 241ms/epoch - 5ms/step Epoch 91/300 46/46 - 0s - loss: 0.9852 - accuracy: 0.6952 - val_loss: 0.7740 - val_accuracy: 0.7740 - 242ms/epoch - 5ms/step Epoch 92/300 46/46 - 0s - loss: 0.9770 - accuracy: 0.6972 - val_loss: 0.7843 - val_accuracy: 0.7630 - 243ms/epoch - 5ms/step Epoch 93/300 46/46 - 0s - loss: 0.9607 - accuracy: 0.7023 - val_loss: 0.8402 - val_accuracy: 0.7463 - 245ms/epoch - 5ms/step Epoch 94/300 46/46 - 0s - loss: 0.9963 - accuracy: 0.6948 - val_loss: 0.7753 - val_accuracy: 0.7747 - 239ms/epoch - 5ms/step Epoch 95/300 46/46 - 0s - loss: 0.9638 - accuracy: 0.7015 - val_loss: 0.8501 - val_accuracy: 0.7393 - 242ms/epoch - 5ms/step Epoch 96/300 46/46 - 0s - loss: 0.9647 - accuracy: 0.7043 - val_loss: 0.7738 - val_accuracy: 0.7753 - 241ms/epoch - 5ms/step Epoch 97/300 46/46 - 0s - loss: 0.9663 - accuracy: 0.7010 - val_loss: 0.7420 - val_accuracy: 0.7860 - 245ms/epoch - 5ms/step Epoch 98/300 46/46 - 0s - loss: 0.9459 - accuracy: 0.7117 - val_loss: 0.7502 - val_accuracy: 0.7747 - 243ms/epoch - 5ms/step Epoch 99/300 46/46 - 0s - loss: 0.9256 - accuracy: 0.7136 - val_loss: 0.7598 - val_accuracy: 0.7690 - 243ms/epoch - 5ms/step Epoch 100/300 46/46 - 0s - loss: 0.9447 - accuracy: 0.7130 - val_loss: 0.9506 - val_accuracy: 0.7053 - 248ms/epoch - 5ms/step Epoch 101/300 46/46 - 0s - loss: 0.9506 - accuracy: 0.7040 - val_loss: 0.7909 - val_accuracy: 0.7677 - 239ms/epoch - 5ms/step Epoch 102/300 46/46 - 0s - loss: 0.9321 - accuracy: 0.7126 - val_loss: 0.7986 - val_accuracy: 0.7643 - 243ms/epoch - 5ms/step Epoch 103/300 46/46 - 0s - loss: 0.9509 - accuracy: 0.7091 - val_loss: 0.7411 - val_accuracy: 0.7803 - 244ms/epoch - 5ms/step Epoch 104/300 46/46 - 0s - loss: 0.9028 - accuracy: 0.7179 - val_loss: 0.7225 - val_accuracy: 0.7883 - 249ms/epoch - 5ms/step Epoch 105/300 46/46 - 0s - loss: 0.9187 - accuracy: 0.7141 - val_loss: 0.7278 - val_accuracy: 0.7857 - 242ms/epoch - 5ms/step Epoch 106/300 46/46 - 0s - loss: 0.9108 - accuracy: 0.7137 - val_loss: 0.7084 - val_accuracy: 0.7923 - 243ms/epoch - 5ms/step Epoch 107/300 46/46 - 0s - loss: 0.9165 - accuracy: 0.7172 - val_loss: 0.7123 - val_accuracy: 0.7877 - 243ms/epoch - 5ms/step Epoch 108/300 46/46 - 0s - loss: 0.9156 - accuracy: 0.7151 - val_loss: 0.7155 - val_accuracy: 0.7883 - 241ms/epoch - 5ms/step Epoch 109/300 46/46 - 0s - loss: 0.9082 - accuracy: 0.7178 - val_loss: 0.7296 - val_accuracy: 0.7827 - 239ms/epoch - 5ms/step Epoch 110/300 46/46 - 0s - loss: 0.9052 - accuracy: 0.7213 - val_loss: 0.7066 - val_accuracy: 0.7893 - 240ms/epoch - 5ms/step Epoch 111/300 46/46 - 0s - loss: 0.9065 - accuracy: 0.7151 - val_loss: 0.7430 - val_accuracy: 0.7760 - 251ms/epoch - 5ms/step Epoch 112/300 46/46 - 0s - loss: 0.8839 - accuracy: 0.7292 - val_loss: 0.6939 - val_accuracy: 0.7937 - 253ms/epoch - 6ms/step Epoch 113/300 46/46 - 0s - loss: 0.8820 - accuracy: 0.7277 - val_loss: 0.7625 - val_accuracy: 0.7727 - 249ms/epoch - 5ms/step Epoch 114/300 46/46 - 0s - loss: 0.8845 - accuracy: 0.7280 - val_loss: 0.7212 - val_accuracy: 0.7803 
- 247ms/epoch - 5ms/step Epoch 115/300 46/46 - 0s - loss: 0.8947 - accuracy: 0.7297 - val_loss: 0.7205 - val_accuracy: 0.7893 - 242ms/epoch - 5ms/step Epoch 116/300 46/46 - 0s - loss: 0.8618 - accuracy: 0.7353 - val_loss: 0.7030 - val_accuracy: 0.7930 - 245ms/epoch - 5ms/step Epoch 117/300 46/46 - 0s - loss: 0.8528 - accuracy: 0.7352 - val_loss: 0.6832 - val_accuracy: 0.7967 - 254ms/epoch - 6ms/step Epoch 118/300 46/46 - 0s - loss: 0.8455 - accuracy: 0.7410 - val_loss: 0.7073 - val_accuracy: 0.7913 - 255ms/epoch - 6ms/step Epoch 119/300 46/46 - 0s - loss: 0.8373 - accuracy: 0.7441 - val_loss: 0.7050 - val_accuracy: 0.7837 - 255ms/epoch - 6ms/step Epoch 120/300 46/46 - 0s - loss: 0.8384 - accuracy: 0.7441 - val_loss: 0.6915 - val_accuracy: 0.7933 - 248ms/epoch - 5ms/step Epoch 121/300 46/46 - 0s - loss: 0.8435 - accuracy: 0.7314 - val_loss: 0.6492 - val_accuracy: 0.8093 - 238ms/epoch - 5ms/step Epoch 122/300 46/46 - 0s - loss: 0.8544 - accuracy: 0.7365 - val_loss: 0.6849 - val_accuracy: 0.8000 - 241ms/epoch - 5ms/step Epoch 123/300 46/46 - 0s - loss: 0.9212 - accuracy: 0.7171 - val_loss: 0.6738 - val_accuracy: 0.7990 - 251ms/epoch - 5ms/step Epoch 124/300 46/46 - 0s - loss: 0.8714 - accuracy: 0.7326 - val_loss: 0.6655 - val_accuracy: 0.8070 - 253ms/epoch - 5ms/step Epoch 125/300 46/46 - 0s - loss: 0.8296 - accuracy: 0.7368 - val_loss: 0.6863 - val_accuracy: 0.7987 - 248ms/epoch - 5ms/step Epoch 126/300 46/46 - 0s - loss: 0.8510 - accuracy: 0.7340 - val_loss: 0.6718 - val_accuracy: 0.8050 - 246ms/epoch - 5ms/step Epoch 127/300 46/46 - 0s - loss: 0.8244 - accuracy: 0.7477 - val_loss: 0.7033 - val_accuracy: 0.7903 - 249ms/epoch - 5ms/step Epoch 128/300 46/46 - 0s - loss: 0.8210 - accuracy: 0.7471 - val_loss: 0.6675 - val_accuracy: 0.7993 - 242ms/epoch - 5ms/step Epoch 129/300 46/46 - 0s - loss: 0.8109 - accuracy: 0.7490 - val_loss: 0.6925 - val_accuracy: 0.7890 - 240ms/epoch - 5ms/step Epoch 130/300 46/46 - 0s - loss: 0.8227 - accuracy: 0.7462 - val_loss: 0.6989 - val_accuracy: 0.7913 - 241ms/epoch - 5ms/step Epoch 131/300 46/46 - 0s - loss: 0.8080 - accuracy: 0.7478 - val_loss: 0.6692 - val_accuracy: 0.8020 - 242ms/epoch - 5ms/step Epoch 132/300 46/46 - 0s - loss: 0.8006 - accuracy: 0.7542 - val_loss: 0.6565 - val_accuracy: 0.8043 - 242ms/epoch - 5ms/step Epoch 133/300 46/46 - 0s - loss: 0.8290 - accuracy: 0.7414 - val_loss: 0.6308 - val_accuracy: 0.8140 - 244ms/epoch - 5ms/step Epoch 134/300 46/46 - 0s - loss: 0.8036 - accuracy: 0.7511 - val_loss: 0.6451 - val_accuracy: 0.8093 - 243ms/epoch - 5ms/step Epoch 135/300 46/46 - 0s - loss: 0.7906 - accuracy: 0.7555 - val_loss: 0.6182 - val_accuracy: 0.8157 - 243ms/epoch - 5ms/step Epoch 136/300 46/46 - 0s - loss: 0.7862 - accuracy: 0.7578 - val_loss: 0.6409 - val_accuracy: 0.8050 - 243ms/epoch - 5ms/step Epoch 137/300 46/46 - 0s - loss: 0.7840 - accuracy: 0.7576 - val_loss: 0.6814 - val_accuracy: 0.7970 - 241ms/epoch - 5ms/step Epoch 138/300 46/46 - 0s - loss: 0.8038 - accuracy: 0.7535 - val_loss: 0.6252 - val_accuracy: 0.8180 - 241ms/epoch - 5ms/step Epoch 139/300 46/46 - 0s - loss: 0.7951 - accuracy: 0.7523 - val_loss: 0.6206 - val_accuracy: 0.8167 - 246ms/epoch - 5ms/step Epoch 140/300 46/46 - 0s - loss: 0.7933 - accuracy: 0.7549 - val_loss: 0.6261 - val_accuracy: 0.8123 - 240ms/epoch - 5ms/step Epoch 141/300 46/46 - 0s - loss: 0.7570 - accuracy: 0.7647 - val_loss: 0.6358 - val_accuracy: 0.8120 - 239ms/epoch - 5ms/step Epoch 142/300 46/46 - 0s - loss: 0.7899 - accuracy: 0.7532 - val_loss: 0.6334 - val_accuracy: 0.8163 - 238ms/epoch - 5ms/step 
Epoch 143/300 46/46 - 0s - loss: 0.7588 - accuracy: 0.7650 - val_loss: 0.6075 - val_accuracy: 0.8193 - 244ms/epoch - 5ms/step Epoch 144/300 46/46 - 0s - loss: 0.7627 - accuracy: 0.7632 - val_loss: 0.6063 - val_accuracy: 0.8187 - 238ms/epoch - 5ms/step Epoch 145/300 46/46 - 0s - loss: 0.7646 - accuracy: 0.7692 - val_loss: 0.6031 - val_accuracy: 0.8273 - 241ms/epoch - 5ms/step Epoch 146/300 46/46 - 0s - loss: 0.7741 - accuracy: 0.7633 - val_loss: 0.6102 - val_accuracy: 0.8213 - 240ms/epoch - 5ms/step Epoch 147/300 46/46 - 0s - loss: 0.7532 - accuracy: 0.7620 - val_loss: 0.5937 - val_accuracy: 0.8273 - 246ms/epoch - 5ms/step Epoch 148/300 46/46 - 0s - loss: 0.7657 - accuracy: 0.7643 - val_loss: 0.6118 - val_accuracy: 0.8163 - 244ms/epoch - 5ms/step Epoch 149/300 46/46 - 0s - loss: 0.7584 - accuracy: 0.7657 - val_loss: 0.6185 - val_accuracy: 0.8190 - 247ms/epoch - 5ms/step Epoch 150/300 46/46 - 0s - loss: 0.7406 - accuracy: 0.7736 - val_loss: 0.5850 - val_accuracy: 0.8313 - 241ms/epoch - 5ms/step Epoch 151/300 46/46 - 0s - loss: 0.7458 - accuracy: 0.7700 - val_loss: 0.6473 - val_accuracy: 0.8100 - 245ms/epoch - 5ms/step Epoch 152/300 46/46 - 0s - loss: 0.7441 - accuracy: 0.7692 - val_loss: 0.5679 - val_accuracy: 0.8327 - 252ms/epoch - 5ms/step Epoch 153/300 46/46 - 0s - loss: 0.7335 - accuracy: 0.7768 - val_loss: 0.5870 - val_accuracy: 0.8290 - 245ms/epoch - 5ms/step Epoch 154/300 46/46 - 0s - loss: 0.7127 - accuracy: 0.7799 - val_loss: 0.6236 - val_accuracy: 0.8190 - 240ms/epoch - 5ms/step Epoch 155/300 46/46 - 0s - loss: 0.7495 - accuracy: 0.7669 - val_loss: 0.5826 - val_accuracy: 0.8307 - 244ms/epoch - 5ms/step Epoch 156/300 46/46 - 0s - loss: 0.7221 - accuracy: 0.7807 - val_loss: 0.5977 - val_accuracy: 0.8267 - 245ms/epoch - 5ms/step Epoch 157/300 46/46 - 0s - loss: 0.7071 - accuracy: 0.7807 - val_loss: 0.7105 - val_accuracy: 0.7837 - 249ms/epoch - 5ms/step Epoch 158/300 46/46 - 0s - loss: 0.7548 - accuracy: 0.7645 - val_loss: 0.6237 - val_accuracy: 0.8197 - 249ms/epoch - 5ms/step Epoch 159/300 46/46 - 0s - loss: 0.7183 - accuracy: 0.7734 - val_loss: 0.5746 - val_accuracy: 0.8350 - 257ms/epoch - 6ms/step Epoch 160/300 46/46 - 0s - loss: 0.7232 - accuracy: 0.7769 - val_loss: 0.5718 - val_accuracy: 0.8350 - 247ms/epoch - 5ms/step Epoch 161/300 46/46 - 0s - loss: 0.7119 - accuracy: 0.7799 - val_loss: 0.5788 - val_accuracy: 0.8357 - 243ms/epoch - 5ms/step Epoch 162/300 46/46 - 0s - loss: 0.7412 - accuracy: 0.7728 - val_loss: 0.5712 - val_accuracy: 0.8353 - 243ms/epoch - 5ms/step Epoch 163/300 46/46 - 0s - loss: 0.7186 - accuracy: 0.7750 - val_loss: 0.5637 - val_accuracy: 0.8357 - 248ms/epoch - 5ms/step Epoch 164/300 46/46 - 0s - loss: 0.7055 - accuracy: 0.7897 - val_loss: 0.6074 - val_accuracy: 0.8243 - 244ms/epoch - 5ms/step Epoch 165/300 46/46 - 0s - loss: 0.6865 - accuracy: 0.7853 - val_loss: 0.5989 - val_accuracy: 0.8197 - 251ms/epoch - 5ms/step Epoch 166/300 46/46 - 0s - loss: 0.7205 - accuracy: 0.7767 - val_loss: 0.5606 - val_accuracy: 0.8367 - 246ms/epoch - 5ms/step Epoch 167/300 46/46 - 0s - loss: 0.7012 - accuracy: 0.7809 - val_loss: 0.5527 - val_accuracy: 0.8400 - 251ms/epoch - 5ms/step Epoch 168/300 46/46 - 0s - loss: 0.6978 - accuracy: 0.7851 - val_loss: 0.5528 - val_accuracy: 0.8383 - 248ms/epoch - 5ms/step Epoch 169/300 46/46 - 0s - loss: 0.6989 - accuracy: 0.7870 - val_loss: 0.6211 - val_accuracy: 0.8177 - 245ms/epoch - 5ms/step Epoch 170/300 46/46 - 0s - loss: 0.7037 - accuracy: 0.7833 - val_loss: 0.5554 - val_accuracy: 0.8457 - 242ms/epoch - 5ms/step Epoch 171/300 46/46 - 0s - 
loss: 0.6935 - accuracy: 0.7894 - val_loss: 0.5818 - val_accuracy: 0.8257 - 246ms/epoch - 5ms/step Epoch 172/300 46/46 - 0s - loss: 0.6856 - accuracy: 0.7911 - val_loss: 0.5757 - val_accuracy: 0.8297 - 243ms/epoch - 5ms/step Epoch 173/300 46/46 - 0s - loss: 0.7003 - accuracy: 0.7907 - val_loss: 0.5923 - val_accuracy: 0.8270 - 241ms/epoch - 5ms/step Epoch 174/300 46/46 - 0s - loss: 0.6728 - accuracy: 0.7910 - val_loss: 0.6113 - val_accuracy: 0.8283 - 242ms/epoch - 5ms/step Epoch 175/300 46/46 - 0s - loss: 0.7024 - accuracy: 0.7862 - val_loss: 0.6084 - val_accuracy: 0.8167 - 246ms/epoch - 5ms/step Epoch 176/300 46/46 - 0s - loss: 0.6896 - accuracy: 0.7928 - val_loss: 0.5532 - val_accuracy: 0.8450 - 245ms/epoch - 5ms/step Epoch 177/300 46/46 - 0s - loss: 0.6659 - accuracy: 0.7912 - val_loss: 0.5269 - val_accuracy: 0.8487 - 243ms/epoch - 5ms/step Epoch 178/300 46/46 - 0s - loss: 0.6914 - accuracy: 0.7903 - val_loss: 0.6001 - val_accuracy: 0.8243 - 249ms/epoch - 5ms/step Epoch 179/300 46/46 - 0s - loss: 0.6691 - accuracy: 0.7951 - val_loss: 0.5893 - val_accuracy: 0.8267 - 250ms/epoch - 5ms/step Epoch 180/300 46/46 - 0s - loss: 0.6583 - accuracy: 0.7980 - val_loss: 0.5442 - val_accuracy: 0.8433 - 242ms/epoch - 5ms/step Epoch 181/300 46/46 - 0s - loss: 0.6621 - accuracy: 0.7961 - val_loss: 0.5334 - val_accuracy: 0.8487 - 248ms/epoch - 5ms/step Epoch 182/300 46/46 - 0s - loss: 0.6534 - accuracy: 0.7979 - val_loss: 0.5325 - val_accuracy: 0.8457 - 240ms/epoch - 5ms/step Epoch 183/300 46/46 - 0s - loss: 0.6492 - accuracy: 0.8011 - val_loss: 0.5408 - val_accuracy: 0.8450 - 245ms/epoch - 5ms/step Epoch 184/300 46/46 - 0s - loss: 0.6388 - accuracy: 0.8034 - val_loss: 0.5244 - val_accuracy: 0.8510 - 245ms/epoch - 5ms/step Epoch 185/300 46/46 - 0s - loss: 0.6470 - accuracy: 0.8057 - val_loss: 0.5179 - val_accuracy: 0.8517 - 252ms/epoch - 5ms/step Epoch 186/300 46/46 - 0s - loss: 0.6620 - accuracy: 0.7935 - val_loss: 0.5370 - val_accuracy: 0.8483 - 247ms/epoch - 5ms/step Epoch 187/300 46/46 - 0s - loss: 0.6397 - accuracy: 0.8048 - val_loss: 0.5684 - val_accuracy: 0.8363 - 249ms/epoch - 5ms/step Epoch 188/300 46/46 - 0s - loss: 0.6288 - accuracy: 0.8044 - val_loss: 0.5292 - val_accuracy: 0.8510 - 256ms/epoch - 6ms/step Epoch 189/300 46/46 - 0s - loss: 0.6461 - accuracy: 0.8053 - val_loss: 0.5845 - val_accuracy: 0.8300 - 244ms/epoch - 5ms/step Epoch 190/300 46/46 - 0s - loss: 0.6512 - accuracy: 0.8004 - val_loss: 0.5288 - val_accuracy: 0.8477 - 241ms/epoch - 5ms/step Epoch 191/300 46/46 - 0s - loss: 0.6279 - accuracy: 0.8062 - val_loss: 0.5256 - val_accuracy: 0.8517 - 247ms/epoch - 5ms/step Epoch 192/300 46/46 - 0s - loss: 0.6468 - accuracy: 0.8015 - val_loss: 0.5444 - val_accuracy: 0.8437 - 246ms/epoch - 5ms/step Epoch 193/300 46/46 - 0s - loss: 0.6523 - accuracy: 0.8047 - val_loss: 0.5569 - val_accuracy: 0.8387 - 245ms/epoch - 5ms/step Epoch 194/300 46/46 - 0s - loss: 0.6447 - accuracy: 0.7969 - val_loss: 0.5404 - val_accuracy: 0.8523 - 243ms/epoch - 5ms/step Epoch 195/300 46/46 - 0s - loss: 0.6089 - accuracy: 0.8108 - val_loss: 0.5009 - val_accuracy: 0.8567 - 244ms/epoch - 5ms/step Epoch 196/300 46/46 - 0s - loss: 0.6279 - accuracy: 0.8117 - val_loss: 0.4986 - val_accuracy: 0.8597 - 243ms/epoch - 5ms/step Epoch 197/300 46/46 - 0s - loss: 0.6296 - accuracy: 0.8033 - val_loss: 0.5549 - val_accuracy: 0.8470 - 241ms/epoch - 5ms/step Epoch 198/300 46/46 - 0s - loss: 0.6307 - accuracy: 0.8037 - val_loss: 0.5006 - val_accuracy: 0.8587 - 242ms/epoch - 5ms/step Epoch 199/300 46/46 - 0s - loss: 0.6318 - accuracy: 
0.8073 - val_loss: 0.4881 - val_accuracy: 0.8650 - 247ms/epoch - 5ms/step Epoch 200/300 46/46 - 0s - loss: 0.6113 - accuracy: 0.8108 - val_loss: 0.5293 - val_accuracy: 0.8490 - 244ms/epoch - 5ms/step Epoch 201/300 46/46 - 0s - loss: 0.6245 - accuracy: 0.8069 - val_loss: 0.5279 - val_accuracy: 0.8437 - 243ms/epoch - 5ms/step Epoch 202/300 46/46 - 0s - loss: 0.6068 - accuracy: 0.8141 - val_loss: 0.5217 - val_accuracy: 0.8503 - 241ms/epoch - 5ms/step Epoch 203/300 46/46 - 0s - loss: 0.6096 - accuracy: 0.8114 - val_loss: 0.4902 - val_accuracy: 0.8630 - 243ms/epoch - 5ms/step Epoch 204/300 46/46 - 0s - loss: 0.5950 - accuracy: 0.8156 - val_loss: 0.5283 - val_accuracy: 0.8513 - 247ms/epoch - 5ms/step Epoch 205/300 46/46 - 0s - loss: 0.6305 - accuracy: 0.8082 - val_loss: 0.5056 - val_accuracy: 0.8540 - 241ms/epoch - 5ms/step Epoch 206/300 46/46 - 0s - loss: 0.6122 - accuracy: 0.8121 - val_loss: 0.5068 - val_accuracy: 0.8597 - 242ms/epoch - 5ms/step Epoch 207/300 46/46 - 0s - loss: 0.5979 - accuracy: 0.8154 - val_loss: 0.4966 - val_accuracy: 0.8597 - 247ms/epoch - 5ms/step Epoch 208/300 46/46 - 0s - loss: 0.5900 - accuracy: 0.8165 - val_loss: 0.5139 - val_accuracy: 0.8547 - 243ms/epoch - 5ms/step Epoch 209/300 46/46 - 0s - loss: 0.5925 - accuracy: 0.8201 - val_loss: 0.4859 - val_accuracy: 0.8650 - 245ms/epoch - 5ms/step Epoch 210/300 46/46 - 0s - loss: 0.6076 - accuracy: 0.8162 - val_loss: 0.5349 - val_accuracy: 0.8517 - 243ms/epoch - 5ms/step Epoch 211/300 46/46 - 0s - loss: 0.5971 - accuracy: 0.8204 - val_loss: 0.4874 - val_accuracy: 0.8633 - 243ms/epoch - 5ms/step Epoch 212/300 46/46 - 0s - loss: 0.5955 - accuracy: 0.8238 - val_loss: 0.4922 - val_accuracy: 0.8643 - 241ms/epoch - 5ms/step Epoch 213/300 46/46 - 0s - loss: 0.5941 - accuracy: 0.8195 - val_loss: 0.5090 - val_accuracy: 0.8567 - 242ms/epoch - 5ms/step Epoch 214/300 46/46 - 0s - loss: 0.5981 - accuracy: 0.8161 - val_loss: 0.4757 - val_accuracy: 0.8670 - 242ms/epoch - 5ms/step Epoch 215/300 46/46 - 0s - loss: 0.5775 - accuracy: 0.8230 - val_loss: 0.4870 - val_accuracy: 0.8650 - 242ms/epoch - 5ms/step Epoch 216/300 46/46 - 0s - loss: 0.5821 - accuracy: 0.8160 - val_loss: 0.5028 - val_accuracy: 0.8543 - 242ms/epoch - 5ms/step Epoch 217/300 46/46 - 0s - loss: 0.5847 - accuracy: 0.8188 - val_loss: 0.4892 - val_accuracy: 0.8670 - 239ms/epoch - 5ms/step Epoch 218/300 46/46 - 0s - loss: 0.5788 - accuracy: 0.8247 - val_loss: 0.4770 - val_accuracy: 0.8673 - 244ms/epoch - 5ms/step Epoch 219/300 46/46 - 0s - loss: 0.5898 - accuracy: 0.8219 - val_loss: 0.4866 - val_accuracy: 0.8640 - 244ms/epoch - 5ms/step Epoch 220/300 46/46 - 0s - loss: 0.5926 - accuracy: 0.8180 - val_loss: 0.4894 - val_accuracy: 0.8623 - 243ms/epoch - 5ms/step Epoch 221/300 46/46 - 0s - loss: 0.5850 - accuracy: 0.8165 - val_loss: 0.4731 - val_accuracy: 0.8700 - 249ms/epoch - 5ms/step Epoch 222/300 46/46 - 0s - loss: 0.5829 - accuracy: 0.8213 - val_loss: 0.5524 - val_accuracy: 0.8420 - 262ms/epoch - 6ms/step Epoch 223/300 46/46 - 0s - loss: 0.5977 - accuracy: 0.8196 - val_loss: 0.4955 - val_accuracy: 0.8587 - 263ms/epoch - 6ms/step Epoch 224/300 46/46 - 0s - loss: 0.5672 - accuracy: 0.8219 - val_loss: 0.5151 - val_accuracy: 0.8537 - 256ms/epoch - 6ms/step Epoch 225/300 46/46 - 0s - loss: 0.5960 - accuracy: 0.8170 - val_loss: 0.4802 - val_accuracy: 0.8677 - 256ms/epoch - 6ms/step Epoch 226/300 46/46 - 0s - loss: 0.5690 - accuracy: 0.8301 - val_loss: 0.4740 - val_accuracy: 0.8673 - 253ms/epoch - 6ms/step Epoch 227/300 46/46 - 0s - loss: 0.6034 - accuracy: 0.8190 - val_loss: 0.4942 
- val_accuracy: 0.8697 - 253ms/epoch - 6ms/step Epoch 228/300 46/46 - 0s - loss: 0.6007 - accuracy: 0.8166 - val_loss: 0.4844 - val_accuracy: 0.8630 - 244ms/epoch - 5ms/step Epoch 229/300 46/46 - 0s - loss: 0.5360 - accuracy: 0.8335 - val_loss: 0.4755 - val_accuracy: 0.8630 - 246ms/epoch - 5ms/step Epoch 230/300 46/46 - 0s - loss: 0.5804 - accuracy: 0.8211 - val_loss: 0.4778 - val_accuracy: 0.8720 - 241ms/epoch - 5ms/step Epoch 231/300 46/46 - 0s - loss: 0.6041 - accuracy: 0.8175 - val_loss: 0.5459 - val_accuracy: 0.8447 - 239ms/epoch - 5ms/step Epoch 232/300 46/46 - 0s - loss: 0.5662 - accuracy: 0.8203 - val_loss: 0.4830 - val_accuracy: 0.8660 - 249ms/epoch - 5ms/step Epoch 233/300 46/46 - 0s - loss: 0.5675 - accuracy: 0.8291 - val_loss: 0.4618 - val_accuracy: 0.8747 - 248ms/epoch - 5ms/step Epoch 234/300 46/46 - 0s - loss: 0.5575 - accuracy: 0.8306 - val_loss: 0.4718 - val_accuracy: 0.8703 - 249ms/epoch - 5ms/step Epoch 235/300 46/46 - 0s - loss: 0.5671 - accuracy: 0.8299 - val_loss: 0.4727 - val_accuracy: 0.8733 - 243ms/epoch - 5ms/step Epoch 236/300 46/46 - 0s - loss: 0.5725 - accuracy: 0.8319 - val_loss: 0.4844 - val_accuracy: 0.8637 - 244ms/epoch - 5ms/step Epoch 237/300 46/46 - 0s - loss: 0.5786 - accuracy: 0.8296 - val_loss: 0.4669 - val_accuracy: 0.8717 - 243ms/epoch - 5ms/step Epoch 238/300 46/46 - 0s - loss: 0.5437 - accuracy: 0.8283 - val_loss: 0.4912 - val_accuracy: 0.8677 - 240ms/epoch - 5ms/step Epoch 239/300 46/46 - 0s - loss: 0.5475 - accuracy: 0.8323 - val_loss: 0.4647 - val_accuracy: 0.8757 - 244ms/epoch - 5ms/step Epoch 240/300 46/46 - 0s - loss: 0.5684 - accuracy: 0.8255 - val_loss: 0.5044 - val_accuracy: 0.8677 - 242ms/epoch - 5ms/step Epoch 241/300 46/46 - 0s - loss: 0.5371 - accuracy: 0.8335 - val_loss: 0.4586 - val_accuracy: 0.8757 - 248ms/epoch - 5ms/step Epoch 242/300 46/46 - 0s - loss: 0.5663 - accuracy: 0.8290 - val_loss: 0.4442 - val_accuracy: 0.8770 - 253ms/epoch - 5ms/step Epoch 243/300 46/46 - 0s - loss: 0.5283 - accuracy: 0.8412 - val_loss: 0.4687 - val_accuracy: 0.8657 - 262ms/epoch - 6ms/step Epoch 244/300 46/46 - 0s - loss: 0.5486 - accuracy: 0.8345 - val_loss: 0.4621 - val_accuracy: 0.8733 - 259ms/epoch - 6ms/step Epoch 245/300 46/46 - 0s - loss: 0.5488 - accuracy: 0.8301 - val_loss: 0.4509 - val_accuracy: 0.8753 - 243ms/epoch - 5ms/step Epoch 246/300 46/46 - 0s - loss: 0.5423 - accuracy: 0.8352 - val_loss: 0.4677 - val_accuracy: 0.8710 - 239ms/epoch - 5ms/step Epoch 247/300 46/46 - 0s - loss: 0.5394 - accuracy: 0.8326 - val_loss: 0.4474 - val_accuracy: 0.8800 - 244ms/epoch - 5ms/step Epoch 248/300 46/46 - 0s - loss: 0.5434 - accuracy: 0.8354 - val_loss: 0.4726 - val_accuracy: 0.8680 - 245ms/epoch - 5ms/step Epoch 249/300 46/46 - 0s - loss: 0.5605 - accuracy: 0.8307 - val_loss: 0.4920 - val_accuracy: 0.8650 - 246ms/epoch - 5ms/step Epoch 250/300 46/46 - 0s - loss: 0.5165 - accuracy: 0.8388 - val_loss: 0.4492 - val_accuracy: 0.8797 - 241ms/epoch - 5ms/step Epoch 251/300 46/46 - 0s - loss: 0.5182 - accuracy: 0.8432 - val_loss: 0.4375 - val_accuracy: 0.8763 - 240ms/epoch - 5ms/step Epoch 252/300 46/46 - 0s - loss: 0.5287 - accuracy: 0.8387 - val_loss: 0.4394 - val_accuracy: 0.8820 - 244ms/epoch - 5ms/step Epoch 253/300 46/46 - 0s - loss: 0.5129 - accuracy: 0.8385 - val_loss: 0.4918 - val_accuracy: 0.8633 - 243ms/epoch - 5ms/step Epoch 254/300 46/46 - 0s - loss: 0.5350 - accuracy: 0.8396 - val_loss: 0.4434 - val_accuracy: 0.8790 - 241ms/epoch - 5ms/step Epoch 255/300 46/46 - 0s - loss: 0.5332 - accuracy: 0.8392 - val_loss: 0.4747 - val_accuracy: 0.8723 - 
240ms/epoch - 5ms/step Epoch 256/300 46/46 - 0s - loss: 0.5443 - accuracy: 0.8340 - val_loss: 0.5203 - val_accuracy: 0.8553 - 243ms/epoch - 5ms/step Epoch 257/300 46/46 - 0s - loss: 0.5563 - accuracy: 0.8335 - val_loss: 0.4900 - val_accuracy: 0.8663 - 241ms/epoch - 5ms/step Epoch 258/300 46/46 - 0s - loss: 0.5161 - accuracy: 0.8455 - val_loss: 0.4608 - val_accuracy: 0.8763 - 249ms/epoch - 5ms/step Epoch 259/300 46/46 - 0s - loss: 0.5617 - accuracy: 0.8272 - val_loss: 0.4827 - val_accuracy: 0.8727 - 247ms/epoch - 5ms/step Epoch 260/300 46/46 - 0s - loss: 0.5340 - accuracy: 0.8366 - val_loss: 0.4630 - val_accuracy: 0.8683 - 247ms/epoch - 5ms/step Epoch 261/300 46/46 - 0s - loss: 0.5105 - accuracy: 0.8487 - val_loss: 0.4552 - val_accuracy: 0.8803 - 243ms/epoch - 5ms/step Epoch 262/300 46/46 - 0s - loss: 0.5039 - accuracy: 0.8433 - val_loss: 0.4705 - val_accuracy: 0.8667 - 240ms/epoch - 5ms/step Epoch 263/300 46/46 - 0s - loss: 0.5401 - accuracy: 0.8354 - val_loss: 0.4493 - val_accuracy: 0.8790 - 242ms/epoch - 5ms/step Epoch 264/300 46/46 - 0s - loss: 0.5153 - accuracy: 0.8397 - val_loss: 0.4637 - val_accuracy: 0.8773 - 239ms/epoch - 5ms/step Epoch 265/300 46/46 - 0s - loss: 0.4926 - accuracy: 0.8439 - val_loss: 0.4271 - val_accuracy: 0.8853 - 240ms/epoch - 5ms/step Epoch 266/300 46/46 - 0s - loss: 0.5210 - accuracy: 0.8459 - val_loss: 0.4450 - val_accuracy: 0.8820 - 241ms/epoch - 5ms/step Epoch 267/300 46/46 - 0s - loss: 0.4949 - accuracy: 0.8474 - val_loss: 0.4424 - val_accuracy: 0.8837 - 245ms/epoch - 5ms/step Epoch 268/300 46/46 - 0s - loss: 0.5031 - accuracy: 0.8502 - val_loss: 0.4748 - val_accuracy: 0.8703 - 243ms/epoch - 5ms/step Epoch 269/300 46/46 - 0s - loss: 0.5163 - accuracy: 0.8454 - val_loss: 0.4499 - val_accuracy: 0.8753 - 241ms/epoch - 5ms/step Epoch 270/300 46/46 - 0s - loss: 0.5025 - accuracy: 0.8464 - val_loss: 0.4253 - val_accuracy: 0.8880 - 238ms/epoch - 5ms/step Epoch 271/300 46/46 - 0s - loss: 0.5150 - accuracy: 0.8437 - val_loss: 0.4358 - val_accuracy: 0.8833 - 243ms/epoch - 5ms/step Epoch 272/300 46/46 - 0s - loss: 0.4986 - accuracy: 0.8439 - val_loss: 0.4468 - val_accuracy: 0.8763 - 240ms/epoch - 5ms/step Epoch 273/300 46/46 - 0s - loss: 0.5061 - accuracy: 0.8442 - val_loss: 0.5316 - val_accuracy: 0.8560 - 238ms/epoch - 5ms/step Epoch 274/300 46/46 - 0s - loss: 0.5195 - accuracy: 0.8393 - val_loss: 0.4803 - val_accuracy: 0.8670 - 238ms/epoch - 5ms/step Epoch 275/300 46/46 - 0s - loss: 0.5166 - accuracy: 0.8482 - val_loss: 0.4422 - val_accuracy: 0.8830 - 240ms/epoch - 5ms/step Epoch 276/300 46/46 - 0s - loss: 0.5174 - accuracy: 0.8395 - val_loss: 0.4662 - val_accuracy: 0.8720 - 246ms/epoch - 5ms/step Epoch 277/300 46/46 - 0s - loss: 0.5085 - accuracy: 0.8467 - val_loss: 0.4515 - val_accuracy: 0.8813 - 252ms/epoch - 5ms/step Epoch 278/300 46/46 - 0s - loss: 0.4958 - accuracy: 0.8476 - val_loss: 0.4559 - val_accuracy: 0.8790 - 261ms/epoch - 6ms/step Epoch 279/300 46/46 - 0s - loss: 0.4997 - accuracy: 0.8511 - val_loss: 0.4288 - val_accuracy: 0.8847 - 259ms/epoch - 6ms/step Epoch 280/300 46/46 - 0s - loss: 0.5179 - accuracy: 0.8456 - val_loss: 0.4428 - val_accuracy: 0.8773 - 270ms/epoch - 6ms/step Epoch 281/300 46/46 - 0s - loss: 0.4951 - accuracy: 0.8531 - val_loss: 0.4351 - val_accuracy: 0.8837 - 254ms/epoch - 6ms/step Epoch 282/300 46/46 - 0s - loss: 0.4945 - accuracy: 0.8560 - val_loss: 0.4848 - val_accuracy: 0.8763 - 257ms/epoch - 6ms/step Epoch 283/300 46/46 - 0s - loss: 0.5063 - accuracy: 0.8482 - val_loss: 0.4349 - val_accuracy: 0.8843 - 256ms/epoch - 6ms/step 
Epoch 284/300 46/46 - 0s - loss: 0.5034 - accuracy: 0.8458 - val_loss: 0.4469 - val_accuracy: 0.8800 - 257ms/epoch - 6ms/step Epoch 285/300 46/46 - 0s - loss: 0.4745 - accuracy: 0.8538 - val_loss: 0.4179 - val_accuracy: 0.8890 - 253ms/epoch - 5ms/step Epoch 286/300 46/46 - 0s - loss: 0.5006 - accuracy: 0.8482 - val_loss: 0.4341 - val_accuracy: 0.8863 - 244ms/epoch - 5ms/step Epoch 287/300 46/46 - 0s - loss: 0.4843 - accuracy: 0.8526 - val_loss: 0.4388 - val_accuracy: 0.8800 - 242ms/epoch - 5ms/step Epoch 288/300 46/46 - 0s - loss: 0.4917 - accuracy: 0.8488 - val_loss: 0.4437 - val_accuracy: 0.8760 - 241ms/epoch - 5ms/step Epoch 289/300 46/46 - 0s - loss: 0.5111 - accuracy: 0.8444 - val_loss: 0.4440 - val_accuracy: 0.8793 - 240ms/epoch - 5ms/step Epoch 290/300 46/46 - 0s - loss: 0.4873 - accuracy: 0.8528 - val_loss: 0.4143 - val_accuracy: 0.8890 - 239ms/epoch - 5ms/step Epoch 291/300 46/46 - 0s - loss: 0.4818 - accuracy: 0.8508 - val_loss: 0.4311 - val_accuracy: 0.8803 - 239ms/epoch - 5ms/step Epoch 292/300 46/46 - 0s - loss: 0.4751 - accuracy: 0.8523 - val_loss: 0.4321 - val_accuracy: 0.8790 - 241ms/epoch - 5ms/step Epoch 293/300 46/46 - 0s - loss: 0.4552 - accuracy: 0.8646 - val_loss: 0.4300 - val_accuracy: 0.8857 - 239ms/epoch - 5ms/step Epoch 294/300 46/46 - 0s - loss: 0.4781 - accuracy: 0.8559 - val_loss: 0.4335 - val_accuracy: 0.8853 - 242ms/epoch - 5ms/step Epoch 295/300 46/46 - 0s - loss: 0.4800 - accuracy: 0.8521 - val_loss: 0.4298 - val_accuracy: 0.8867 - 237ms/epoch - 5ms/step Epoch 296/300 46/46 - 0s - loss: 0.4866 - accuracy: 0.8519 - val_loss: 0.4510 - val_accuracy: 0.8783 - 244ms/epoch - 5ms/step Epoch 297/300 46/46 - 0s - loss: 0.4740 - accuracy: 0.8587 - val_loss: 0.4266 - val_accuracy: 0.8840 - 239ms/epoch - 5ms/step Epoch 298/300 46/46 - 0s - loss: 0.4726 - accuracy: 0.8529 - val_loss: 0.4145 - val_accuracy: 0.8887 - 246ms/epoch - 5ms/step Epoch 299/300 46/46 - 0s - loss: 0.4614 - accuracy: 0.8614 - val_loss: 0.4322 - val_accuracy: 0.8820 - 258ms/epoch - 6ms/step Epoch 300/300 46/46 - 0s - loss: 0.4753 - accuracy: 0.8541 - val_loss: 0.4827 - val_accuracy: 0.8730 - 245ms/epoch - 5ms/step 94/94 - 0s - loss: 0.4827 - accuracy: 0.8730 - 342ms/epoch - 4ms/step Baseline Error: 12.70%
# With the new (augmented, engineered) data
model = GoatModel()
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
history = model.fit(X_train31_augmented, train_labels_augmented,
                    validation_data=(X_val31, validation_labels),
                    epochs=500, batch_size=200, verbose=2)  # no class weights
scores = model.evaluate(X_val31, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100 - scores[1] * 100))
plot_history(history)
# Record the final metrics of this run for later comparison
df31augmentationcompare.loc[len(df31augmentationcompare)] = [
    'With Augmented Engineered Data',
    history.history['accuracy'][-1],
    history.history['val_accuracy'][-1],
    history.history['loss'][-1],
    history.history['val_loss'][-1],
    history,
]
Epoch 1/500
72/72 - 3s - loss: 2.7526 - accuracy: 0.0869 - val_loss: 2.5949 - val_accuracy: 0.1367 - 3s/epoch - 36ms/step
[epochs 2-499: training loss falls from ~2.55 to ~0.49 while val_accuracy climbs from ~0.21 to ~0.91]
Epoch 500/500 72/72 - 0s - loss: 0.5008 - accuracy: 0.8512 - val_loss: 0.3271 - val_accuracy: 0.9087 - 328ms/epoch - 5ms/step
94/94 - 0s - loss: 0.3271 - accuracy: 0.9087 - 329ms/epoch - 3ms/step
Baseline Error: 9.13%
df31augmentationcompare
| | Model Name | Train Accuracy | Test Accuracy | Train Loss | Test Loss | History |
|---|---|---|---|---|---|---|
| 0 | With Class Weights | 0.854120 | 0.873000 | 0.475335 | 0.482661 | <keras.callbacks.History object at 0x7f5a147cb... |
| 1 | With Augmented Engineered Data | 0.851239 | 0.908667 | 0.500761 | 0.327107 | <keras.callbacks.History object at 0x7f594325f... |
With Augmented Engineered Data outperforms With Class Weights on the test set in both accuracy and loss, while the training metrics of the two runs are comparable. Based on these metrics, it appears to be the more effective model.
#save the new "best" model
model.save('Goat31Model.h5')
#to be extra safe we can use the test data as well just to make sure
augmentmodel = models.load_model('Goat31Model.h5')
#evaluate the model on the test set
score = augmentmodel.evaluate(X_test31, test_labels, verbose=0)
print("Test loss:", score[0])
print("Test accuracy:", score[1])
Test loss: 0.32786089181900024 Test accuracy: 0.9136666655540466
The augmented model has the highest accuracy as well as the lowest test loss out of all the potential "best" models. This is likely because the data augmentation is effective; one thing we should note is that the augmented model takes slightly longer to converge fully compared to the other models.
The model seems to train better on the augmented data, and this is reflected in the accuracy on the test data. We should note that the model takes longer to converge, which could also imply that the improved accuracy and loss are a result of the larger training set rather than augmentation-based balancing being better than class weights.
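As an extra check beyond the single accuracy number, we can look at how the saved model behaves per class on the test set. The sketch below is not part of the original pipeline: it assumes X_test31 and test_labels are the same arrays used in the evaluation above, and it handles both one-hot and integer label encodings since the exact encoding is an assumption here.
#hedged sketch: per-class check of the reloaded "best" model, not part of the original pipeline
import numpy as np
import tensorflow as tf
from keras import models
augmentmodel = models.load_model('Goat31Model.h5')
probs = augmentmodel.predict(X_test31, verbose=0)
y_pred = probs.argmax(axis=1)
y_true = np.asarray(test_labels)
if y_true.ndim > 1: #labels are one-hot encoded, as elsewhere in the notebook
    y_true = y_true.argmax(axis=1)
#confusion matrix over the 15 classes and per-class accuracy (recall)
cm = tf.math.confusion_matrix(y_true, y_pred, num_classes=15).numpy()
per_class_acc = cm.diagonal() / cm.sum(axis=1).clip(min=1)
for cls, acc in enumerate(per_class_acc):
    print(f"class {cls}: accuracy {acc:.3f} ({cm[cls].sum()} test samples)")
If the rarer classes score close to the common ones, that would support the balancing interpretation; if the gain over the class-weights model came mostly from having more training data, we would expect the overall accuracy to rise while the rare classes stay weak.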
Unlike the 31x31 model, the 128x128 model has more data to work with since the images are more complex, so there is also more room to add layers to the model. However, we will first try the same techniques as we did with the 31x31 model and see if there are any improvements. If you have already forgotten, the initial additions for the 31x31 model were L2 regularization on the dense layer, a longer training run (more epochs), and class weights.
Hopefully this will allow the model to fully converge and learn the data, while preventing overfitting.
#original model
# model_128 = models.Sequential([
# layers.Conv2D(32, (3, 3), activation='relu', input_shape=(128, 128, 1)),
# layers.MaxPooling2D((2, 2)),
# layers.Conv2D(64, (3, 3), activation='relu'),
# layers.MaxPooling2D((2, 2)),
# layers.Conv2D(64, (3, 3), activation='relu'),
# layers.Flatten(),
# layers.Dense(64, activation='relu'),
# layers.Dense(15, activation='softmax')
# ])
model_128regularized = models.Sequential([
layers.Conv2D(32, (3, 3), activation='relu', input_shape=(128, 128, 1)),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.Flatten(),
layers.Dense(64, activation='relu', kernel_regularizer=regularizers.l2(0.001)), #add reg
layers.Dense(15, activation='softmax')
])
model_128regularized.compile(optimizer='adam',
loss= 'categorical_crossentropy',
metrics=['accuracy'])
history = model_128regularized.fit(X_train128, train_labels, validation_data=(X_val128, validation_labels),
epochs=100, batch_size=32, verbose=2, class_weight = class_weight) #add epochs
scores = model_128regularized.evaluate(X_val128, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
df128.loc[len(df128)] = ['RegularizedModel', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
Epoch 1/100 283/283 - 4s - loss: 2.5538 - accuracy: 0.2790 - val_loss: 2.0602 - val_accuracy: 0.3870 - 4s/epoch - 15ms/step Epoch 2/100 283/283 - 2s - loss: 1.7480 - accuracy: 0.5305 - val_loss: 1.6805 - val_accuracy: 0.5227 - 2s/epoch - 7ms/step Epoch 3/100 283/283 - 2s - loss: 1.3068 - accuracy: 0.6648 - val_loss: 1.4288 - val_accuracy: 0.6210 - 2s/epoch - 7ms/step Epoch 4/100 283/283 - 2s - loss: 1.0143 - accuracy: 0.7576 - val_loss: 1.2674 - val_accuracy: 0.6910 - 2s/epoch - 7ms/step Epoch 5/100 283/283 - 2s - loss: 0.8400 - accuracy: 0.8248 - val_loss: 1.3225 - val_accuracy: 0.6980 - 2s/epoch - 7ms/step Epoch 6/100 283/283 - 2s - loss: 0.6999 - accuracy: 0.8638 - val_loss: 1.2777 - val_accuracy: 0.7170 - 2s/epoch - 7ms/step Epoch 7/100 283/283 - 2s - loss: 0.5963 - accuracy: 0.9066 - val_loss: 1.2821 - val_accuracy: 0.7347 - 2s/epoch - 7ms/step Epoch 8/100 283/283 - 2s - loss: 0.5215 - accuracy: 0.9257 - val_loss: 1.5631 - val_accuracy: 0.6697 - 2s/epoch - 7ms/step Epoch 9/100 283/283 - 2s - loss: 0.5480 - accuracy: 0.9220 - val_loss: 1.2904 - val_accuracy: 0.7500 - 2s/epoch - 7ms/step Epoch 10/100 283/283 - 2s - loss: 0.5201 - accuracy: 0.9319 - val_loss: 1.4601 - val_accuracy: 0.7230 - 2s/epoch - 7ms/step Epoch 11/100 283/283 - 2s - loss: 0.5228 - accuracy: 0.9359 - val_loss: 1.5287 - val_accuracy: 0.7307 - 2s/epoch - 7ms/step Epoch 12/100 283/283 - 2s - loss: 0.4293 - accuracy: 0.9648 - val_loss: 1.4915 - val_accuracy: 0.7357 - 2s/epoch - 7ms/step Epoch 13/100 283/283 - 2s - loss: 0.4765 - accuracy: 0.9481 - val_loss: 1.3625 - val_accuracy: 0.7560 - 2s/epoch - 7ms/step Epoch 14/100 283/283 - 2s - loss: 0.4062 - accuracy: 0.9670 - val_loss: 1.4288 - val_accuracy: 0.7610 - 2s/epoch - 7ms/step Epoch 15/100 283/283 - 2s - loss: 0.4940 - accuracy: 0.9422 - val_loss: 1.5420 - val_accuracy: 0.7347 - 2s/epoch - 7ms/step Epoch 16/100 283/283 - 2s - loss: 0.4413 - accuracy: 0.9634 - val_loss: 1.4767 - val_accuracy: 0.7543 - 2s/epoch - 7ms/step Epoch 17/100 283/283 - 2s - loss: 0.4189 - accuracy: 0.9679 - val_loss: 1.4947 - val_accuracy: 0.7603 - 2s/epoch - 7ms/step Epoch 18/100 283/283 - 2s - loss: 0.4102 - accuracy: 0.9684 - val_loss: 1.7956 - val_accuracy: 0.6977 - 2s/epoch - 7ms/step Epoch 19/100 283/283 - 2s - loss: 0.4197 - accuracy: 0.9646 - val_loss: 2.1524 - val_accuracy: 0.6247 - 2s/epoch - 7ms/step Epoch 20/100 283/283 - 2s - loss: 0.3724 - accuracy: 0.9763 - val_loss: 1.5943 - val_accuracy: 0.7467 - 2s/epoch - 7ms/step Epoch 21/100 283/283 - 2s - loss: 0.4009 - accuracy: 0.9613 - val_loss: 1.7093 - val_accuracy: 0.7303 - 2s/epoch - 7ms/step Epoch 22/100 283/283 - 2s - loss: 0.4843 - accuracy: 0.9503 - val_loss: 1.5897 - val_accuracy: 0.7340 - 2s/epoch - 7ms/step Epoch 23/100 283/283 - 2s - loss: 0.3886 - accuracy: 0.9765 - val_loss: 1.7632 - val_accuracy: 0.7200 - 2s/epoch - 7ms/step Epoch 24/100 283/283 - 2s - loss: 0.3477 - accuracy: 0.9838 - val_loss: 1.5582 - val_accuracy: 0.7530 - 2s/epoch - 7ms/step Epoch 25/100 283/283 - 2s - loss: 0.3439 - accuracy: 0.9778 - val_loss: 1.6767 - val_accuracy: 0.7283 - 2s/epoch - 7ms/step Epoch 26/100 283/283 - 2s - loss: 0.4595 - accuracy: 0.9569 - val_loss: 1.5120 - val_accuracy: 0.7577 - 2s/epoch - 7ms/step Epoch 27/100 283/283 - 2s - loss: 0.3989 - accuracy: 0.9744 - val_loss: 1.7129 - val_accuracy: 0.7280 - 2s/epoch - 7ms/step Epoch 28/100 283/283 - 2s - loss: 0.3663 - accuracy: 0.9783 - val_loss: 1.6114 - val_accuracy: 0.7450 - 2s/epoch - 7ms/step Epoch 29/100 283/283 - 2s - loss: 0.3692 - accuracy: 0.9755 - val_loss: 1.5429 - 
val_accuracy: 0.7593 - 2s/epoch - 7ms/step Epoch 30/100 283/283 - 2s - loss: 0.4075 - accuracy: 0.9671 - val_loss: 1.8357 - val_accuracy: 0.7307 - 2s/epoch - 7ms/step Epoch 31/100 283/283 - 2s - loss: 0.3913 - accuracy: 0.9733 - val_loss: 1.5254 - val_accuracy: 0.7630 - 2s/epoch - 7ms/step Epoch 32/100 283/283 - 2s - loss: 0.3565 - accuracy: 0.9809 - val_loss: 1.8618 - val_accuracy: 0.7220 - 2s/epoch - 7ms/step Epoch 33/100 283/283 - 2s - loss: 0.4424 - accuracy: 0.9550 - val_loss: 2.0990 - val_accuracy: 0.6710 - 2s/epoch - 7ms/step Epoch 34/100 283/283 - 2s - loss: 0.3865 - accuracy: 0.9765 - val_loss: 1.6468 - val_accuracy: 0.7473 - 2s/epoch - 7ms/step Epoch 35/100 283/283 - 2s - loss: 0.3191 - accuracy: 0.9879 - val_loss: 1.6404 - val_accuracy: 0.7453 - 2s/epoch - 7ms/step Epoch 36/100 283/283 - 2s - loss: 0.2787 - accuracy: 0.9905 - val_loss: 1.5754 - val_accuracy: 0.7390 - 2s/epoch - 7ms/step Epoch 37/100 283/283 - 2s - loss: 0.3129 - accuracy: 0.9781 - val_loss: 1.5563 - val_accuracy: 0.7270 - 2s/epoch - 7ms/step Epoch 38/100 283/283 - 2s - loss: 0.4608 - accuracy: 0.9531 - val_loss: 1.7024 - val_accuracy: 0.7327 - 2s/epoch - 7ms/step Epoch 39/100 283/283 - 2s - loss: 0.3701 - accuracy: 0.9817 - val_loss: 1.8737 - val_accuracy: 0.7273 - 2s/epoch - 7ms/step Epoch 40/100 283/283 - 2s - loss: 0.3906 - accuracy: 0.9736 - val_loss: 1.6673 - val_accuracy: 0.7510 - 2s/epoch - 7ms/step Epoch 41/100 283/283 - 2s - loss: 0.3783 - accuracy: 0.9776 - val_loss: 1.6834 - val_accuracy: 0.7470 - 2s/epoch - 7ms/step Epoch 42/100 283/283 - 2s - loss: 0.3561 - accuracy: 0.9821 - val_loss: 2.0030 - val_accuracy: 0.7037 - 2s/epoch - 7ms/step Epoch 43/100 283/283 - 2s - loss: 0.3984 - accuracy: 0.9710 - val_loss: 1.8070 - val_accuracy: 0.7317 - 2s/epoch - 7ms/step Epoch 44/100 283/283 - 2s - loss: 0.3400 - accuracy: 0.9852 - val_loss: 1.7015 - val_accuracy: 0.7363 - 2s/epoch - 7ms/step Epoch 45/100 283/283 - 2s - loss: 0.3524 - accuracy: 0.9815 - val_loss: 1.8167 - val_accuracy: 0.7353 - 2s/epoch - 7ms/step Epoch 46/100 283/283 - 2s - loss: 0.3569 - accuracy: 0.9786 - val_loss: 1.6337 - val_accuracy: 0.7483 - 2s/epoch - 7ms/step Epoch 47/100 283/283 - 2s - loss: 0.3355 - accuracy: 0.9844 - val_loss: 1.7786 - val_accuracy: 0.7363 - 2s/epoch - 7ms/step Epoch 48/100 283/283 - 2s - loss: 0.2927 - accuracy: 0.9903 - val_loss: 1.7774 - val_accuracy: 0.7423 - 2s/epoch - 7ms/step Epoch 49/100 283/283 - 2s - loss: 0.3535 - accuracy: 0.9733 - val_loss: 1.8876 - val_accuracy: 0.7210 - 2s/epoch - 7ms/step Epoch 50/100 283/283 - 2s - loss: 0.4318 - accuracy: 0.9615 - val_loss: 1.8714 - val_accuracy: 0.7220 - 2s/epoch - 7ms/step Epoch 51/100 283/283 - 2s - loss: 0.3914 - accuracy: 0.9784 - val_loss: 2.0926 - val_accuracy: 0.6940 - 2s/epoch - 7ms/step Epoch 52/100 283/283 - 2s - loss: 0.3154 - accuracy: 0.9931 - val_loss: 1.5481 - val_accuracy: 0.7770 - 2s/epoch - 7ms/step Epoch 53/100 283/283 - 2s - loss: 0.2677 - accuracy: 0.9941 - val_loss: 1.5110 - val_accuracy: 0.7793 - 2s/epoch - 7ms/step Epoch 54/100 283/283 - 2s - loss: 0.2848 - accuracy: 0.9824 - val_loss: 1.7618 - val_accuracy: 0.7183 - 2s/epoch - 7ms/step Epoch 55/100 283/283 - 2s - loss: 0.4601 - accuracy: 0.9536 - val_loss: 1.9629 - val_accuracy: 0.6907 - 2s/epoch - 7ms/step Epoch 56/100 283/283 - 2s - loss: 0.3918 - accuracy: 0.9792 - val_loss: 1.6380 - val_accuracy: 0.7483 - 2s/epoch - 7ms/step Epoch 57/100 283/283 - 2s - loss: 0.3063 - accuracy: 0.9945 - val_loss: 1.4562 - val_accuracy: 0.7727 - 2s/epoch - 7ms/step Epoch 58/100 283/283 - 2s - loss: 
0.2462 - accuracy: 0.9980 - val_loss: 1.4502 - val_accuracy: 0.7747 - 2s/epoch - 7ms/step Epoch 59/100 283/283 - 2s - loss: 0.1905 - accuracy: 0.9999 - val_loss: 1.4266 - val_accuracy: 0.7677 - 2s/epoch - 7ms/step Epoch 60/100 283/283 - 2s - loss: 0.1435 - accuracy: 1.0000 - val_loss: 1.2846 - val_accuracy: 0.7780 - 2s/epoch - 7ms/step Epoch 61/100 283/283 - 2s - loss: 0.1087 - accuracy: 0.9992 - val_loss: 1.4433 - val_accuracy: 0.7393 - 2s/epoch - 7ms/step Epoch 62/100 283/283 - 2s - loss: 0.6833 - accuracy: 0.8775 - val_loss: 1.5163 - val_accuracy: 0.7167 - 2s/epoch - 7ms/step Epoch 63/100 283/283 - 2s - loss: 0.4547 - accuracy: 0.9660 - val_loss: 1.4187 - val_accuracy: 0.7687 - 2s/epoch - 7ms/step Epoch 64/100 283/283 - 2s - loss: 0.3146 - accuracy: 0.9965 - val_loss: 1.4310 - val_accuracy: 0.7753 - 2s/epoch - 7ms/step Epoch 65/100 283/283 - 2s - loss: 0.2447 - accuracy: 0.9990 - val_loss: 1.3303 - val_accuracy: 0.7807 - 2s/epoch - 7ms/step Epoch 66/100 283/283 - 2s - loss: 0.1888 - accuracy: 0.9993 - val_loss: 1.2104 - val_accuracy: 0.7840 - 2s/epoch - 7ms/step Epoch 67/100 283/283 - 2s - loss: 0.3656 - accuracy: 0.9556 - val_loss: 1.7838 - val_accuracy: 0.7027 - 2s/epoch - 7ms/step Epoch 68/100 283/283 - 2s - loss: 0.4680 - accuracy: 0.9545 - val_loss: 1.6436 - val_accuracy: 0.7347 - 2s/epoch - 7ms/step Epoch 69/100 283/283 - 2s - loss: 0.3533 - accuracy: 0.9886 - val_loss: 1.6535 - val_accuracy: 0.7510 - 2s/epoch - 7ms/step Epoch 70/100 283/283 - 2s - loss: 0.2777 - accuracy: 0.9960 - val_loss: 1.5948 - val_accuracy: 0.7540 - 2s/epoch - 7ms/step Epoch 71/100 283/283 - 2s - loss: 0.2433 - accuracy: 0.9959 - val_loss: 1.4672 - val_accuracy: 0.7653 - 2s/epoch - 7ms/step Epoch 72/100 283/283 - 2s - loss: 0.2299 - accuracy: 0.9934 - val_loss: 1.7893 - val_accuracy: 0.7180 - 2s/epoch - 7ms/step Epoch 73/100 283/283 - 2s - loss: 0.4392 - accuracy: 0.9473 - val_loss: 1.6937 - val_accuracy: 0.7237 - 2s/epoch - 7ms/step Epoch 74/100 283/283 - 2s - loss: 0.4106 - accuracy: 0.9720 - val_loss: 1.5821 - val_accuracy: 0.7597 - 2s/epoch - 7ms/step Epoch 75/100 283/283 - 2s - loss: 0.3083 - accuracy: 0.9946 - val_loss: 1.5158 - val_accuracy: 0.7683 - 2s/epoch - 7ms/step Epoch 76/100 283/283 - 2s - loss: 0.2480 - accuracy: 0.9984 - val_loss: 1.3818 - val_accuracy: 0.7880 - 2s/epoch - 7ms/step Epoch 77/100 283/283 - 2s - loss: 0.1891 - accuracy: 1.0000 - val_loss: 1.3014 - val_accuracy: 0.7910 - 2s/epoch - 7ms/step Epoch 78/100 283/283 - 2s - loss: 0.1420 - accuracy: 1.0000 - val_loss: 1.2110 - val_accuracy: 0.7943 - 2s/epoch - 7ms/step Epoch 79/100 283/283 - 2s - loss: 0.1053 - accuracy: 1.0000 - val_loss: 1.1123 - val_accuracy: 0.7970 - 2s/epoch - 7ms/step Epoch 80/100 283/283 - 2s - loss: 0.0774 - accuracy: 1.0000 - val_loss: 1.0459 - val_accuracy: 0.8013 - 2s/epoch - 7ms/step Epoch 81/100 283/283 - 2s - loss: 0.1011 - accuracy: 0.9895 - val_loss: 2.2270 - val_accuracy: 0.5737 - 2s/epoch - 7ms/step Epoch 82/100 283/283 - 2s - loss: 0.9133 - accuracy: 0.8274 - val_loss: 1.4562 - val_accuracy: 0.7383 - 2s/epoch - 7ms/step Epoch 83/100 283/283 - 2s - loss: 0.4566 - accuracy: 0.9754 - val_loss: 1.3503 - val_accuracy: 0.7843 - 2s/epoch - 7ms/step Epoch 84/100 283/283 - 2s - loss: 0.3554 - accuracy: 0.9901 - val_loss: 1.4814 - val_accuracy: 0.7647 - 2s/epoch - 7ms/step Epoch 85/100 283/283 - 2s - loss: 0.2623 - accuracy: 0.9994 - val_loss: 1.2711 - val_accuracy: 0.7903 - 2s/epoch - 7ms/step Epoch 86/100 283/283 - 2s - loss: 0.2001 - accuracy: 1.0000 - val_loss: 1.1852 - val_accuracy: 0.7930 - 2s/epoch - 
7ms/step Epoch 87/100 283/283 - 2s - loss: 0.1506 - accuracy: 1.0000 - val_loss: 1.0966 - val_accuracy: 0.7933 - 2s/epoch - 7ms/step Epoch 88/100 283/283 - 2s - loss: 0.1123 - accuracy: 1.0000 - val_loss: 1.0259 - val_accuracy: 0.7907 - 2s/epoch - 7ms/step Epoch 89/100 283/283 - 2s - loss: 0.0831 - accuracy: 1.0000 - val_loss: 0.9778 - val_accuracy: 0.7923 - 2s/epoch - 7ms/step Epoch 90/100 283/283 - 2s - loss: 0.1184 - accuracy: 0.9844 - val_loss: 2.0744 - val_accuracy: 0.5840 - 2s/epoch - 7ms/step Epoch 91/100 283/283 - 2s - loss: 0.8158 - accuracy: 0.8548 - val_loss: 1.3378 - val_accuracy: 0.7597 - 2s/epoch - 7ms/step Epoch 92/100 283/283 - 2s - loss: 0.4367 - accuracy: 0.9776 - val_loss: 1.2897 - val_accuracy: 0.7787 - 2s/epoch - 7ms/step Epoch 93/100 283/283 - 2s - loss: 0.3038 - accuracy: 0.9997 - val_loss: 1.1891 - val_accuracy: 0.7957 - 2s/epoch - 7ms/step Epoch 94/100 283/283 - 2s - loss: 0.2289 - accuracy: 1.0000 - val_loss: 1.1012 - val_accuracy: 0.8003 - 2s/epoch - 7ms/step Epoch 95/100 283/283 - 2s - loss: 0.1732 - accuracy: 0.9999 - val_loss: 1.0459 - val_accuracy: 0.8003 - 2s/epoch - 7ms/step Epoch 96/100 283/283 - 2s - loss: 0.1298 - accuracy: 1.0000 - val_loss: 0.9825 - val_accuracy: 0.8050 - 2s/epoch - 7ms/step Epoch 97/100 283/283 - 2s - loss: 0.0958 - accuracy: 1.0000 - val_loss: 0.9453 - val_accuracy: 0.8040 - 2s/epoch - 7ms/step Epoch 98/100 283/283 - 2s - loss: 0.0709 - accuracy: 1.0000 - val_loss: 0.9669 - val_accuracy: 0.8000 - 2s/epoch - 7ms/step Epoch 99/100 283/283 - 2s - loss: 0.6797 - accuracy: 0.8695 - val_loss: 1.2755 - val_accuracy: 0.7703 - 2s/epoch - 7ms/step Epoch 100/100 283/283 - 2s - loss: 0.4781 - accuracy: 0.9644 - val_loss: 1.5424 - val_accuracy: 0.7543 - 2s/epoch - 7ms/step 94/94 - 0s - loss: 1.5424 - accuracy: 0.7543 - 307ms/epoch - 3ms/step Baseline Error: 24.57%
As we can see, the model is overfitting, and there are also fluctuations in the accuracy and loss; this is probably because the model is struggling to learn the data. We can try adding more layers to the model as well as more regularization (a dropout layer) to see if we can improve its performance.
model_128morelayers = models.Sequential([
layers.Conv2D(32, (3, 3), activation='relu', input_shape=(128, 128, 1)),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.Flatten(),
layers.Dense(64, activation='relu', kernel_regularizer=regularizers.l2(0.001)),
layers.Dropout(0.5), # Adding dropout for regularization
layers.Dense(32, activation='relu'), # Adding an additional dense layer
layers.Dense(15, activation='softmax')
])
model_128morelayers.compile(optimizer='adam',
loss= 'categorical_crossentropy',
metrics=['accuracy'])
history = model_128morelayers.fit(X_train128, train_labels, validation_data=(X_val128, validation_labels),
epochs=100, batch_size=32, verbose=2, class_weight = class_weight)
scores = model_128morelayers.evaluate(X_val128, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
df128.loc[len(df128)] = ['ExtraThickModel', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
Epoch 1/100 283/283 - 5s - loss: 2.8812 - accuracy: 0.1724 - val_loss: 2.5358 - val_accuracy: 0.1723 - 5s/epoch - 18ms/step Epoch 2/100 283/283 - 2s - loss: 2.4787 - accuracy: 0.2782 - val_loss: 2.2795 - val_accuracy: 0.3067 - 2s/epoch - 7ms/step Epoch 3/100 283/283 - 2s - loss: 2.2416 - accuracy: 0.3489 - val_loss: 2.2402 - val_accuracy: 0.3203 - 2s/epoch - 7ms/step Epoch 4/100 283/283 - 2s - loss: 2.1282 - accuracy: 0.3761 - val_loss: 1.9390 - val_accuracy: 0.4270 - 2s/epoch - 7ms/step Epoch 5/100 283/283 - 2s - loss: 1.9855 - accuracy: 0.4219 - val_loss: 1.8612 - val_accuracy: 0.4387 - 2s/epoch - 7ms/step Epoch 6/100 283/283 - 2s - loss: 1.9109 - accuracy: 0.4478 - val_loss: 1.7171 - val_accuracy: 0.5153 - 2s/epoch - 7ms/step Epoch 7/100 283/283 - 2s - loss: 1.8270 - accuracy: 0.4791 - val_loss: 1.7413 - val_accuracy: 0.5180 - 2s/epoch - 7ms/step Epoch 8/100 283/283 - 2s - loss: 1.7690 - accuracy: 0.4952 - val_loss: 1.6935 - val_accuracy: 0.5317 - 2s/epoch - 7ms/step Epoch 9/100 283/283 - 2s - loss: 1.7440 - accuracy: 0.5076 - val_loss: 1.7159 - val_accuracy: 0.5380 - 2s/epoch - 7ms/step Epoch 10/100 283/283 - 2s - loss: 1.6999 - accuracy: 0.5213 - val_loss: 1.6452 - val_accuracy: 0.5610 - 2s/epoch - 7ms/step Epoch 11/100 283/283 - 2s - loss: 1.6367 - accuracy: 0.5364 - val_loss: 1.5994 - val_accuracy: 0.5787 - 2s/epoch - 7ms/step Epoch 12/100 283/283 - 2s - loss: 1.6073 - accuracy: 0.5523 - val_loss: 1.4841 - val_accuracy: 0.6373 - 2s/epoch - 7ms/step Epoch 13/100 283/283 - 2s - loss: 1.5634 - accuracy: 0.5576 - val_loss: 1.5270 - val_accuracy: 0.6117 - 2s/epoch - 7ms/step Epoch 14/100 283/283 - 2s - loss: 1.5300 - accuracy: 0.5852 - val_loss: 1.5233 - val_accuracy: 0.6127 - 2s/epoch - 7ms/step Epoch 15/100 283/283 - 2s - loss: 1.4897 - accuracy: 0.6072 - val_loss: 1.4396 - val_accuracy: 0.6283 - 2s/epoch - 7ms/step Epoch 16/100 283/283 - 2s - loss: 1.4313 - accuracy: 0.6239 - val_loss: 1.3497 - val_accuracy: 0.6760 - 2s/epoch - 7ms/step Epoch 17/100 283/283 - 2s - loss: 1.4241 - accuracy: 0.6317 - val_loss: 1.4287 - val_accuracy: 0.6553 - 2s/epoch - 7ms/step Epoch 18/100 283/283 - 2s - loss: 1.4760 - accuracy: 0.6220 - val_loss: 1.6727 - val_accuracy: 0.5920 - 2s/epoch - 7ms/step Epoch 19/100 283/283 - 2s - loss: 1.3986 - accuracy: 0.6450 - val_loss: 1.4882 - val_accuracy: 0.6423 - 2s/epoch - 7ms/step Epoch 20/100 283/283 - 2s - loss: 1.3667 - accuracy: 0.6622 - val_loss: 1.4221 - val_accuracy: 0.6627 - 2s/epoch - 7ms/step Epoch 21/100 283/283 - 2s - loss: 1.3556 - accuracy: 0.6608 - val_loss: 1.3771 - val_accuracy: 0.6900 - 2s/epoch - 7ms/step Epoch 22/100 283/283 - 2s - loss: 1.3493 - accuracy: 0.6652 - val_loss: 1.3120 - val_accuracy: 0.7040 - 2s/epoch - 7ms/step Epoch 23/100 283/283 - 2s - loss: 1.2901 - accuracy: 0.6865 - val_loss: 1.8983 - val_accuracy: 0.5920 - 2s/epoch - 7ms/step Epoch 24/100 283/283 - 2s - loss: 1.3166 - accuracy: 0.6750 - val_loss: 1.4053 - val_accuracy: 0.6753 - 2s/epoch - 7ms/step Epoch 25/100 283/283 - 2s - loss: 1.2819 - accuracy: 0.6882 - val_loss: 1.3242 - val_accuracy: 0.7040 - 2s/epoch - 7ms/step Epoch 26/100 283/283 - 2s - loss: 1.2667 - accuracy: 0.6909 - val_loss: 1.3624 - val_accuracy: 0.6963 - 2s/epoch - 7ms/step Epoch 27/100 283/283 - 2s - loss: 1.2746 - accuracy: 0.6977 - val_loss: 1.3518 - val_accuracy: 0.7043 - 2s/epoch - 7ms/step Epoch 28/100 283/283 - 2s - loss: 1.2817 - accuracy: 0.6962 - val_loss: 1.4332 - val_accuracy: 0.6763 - 2s/epoch - 7ms/step Epoch 29/100 283/283 - 2s - loss: 1.2595 - accuracy: 0.7000 - val_loss: 1.2784 - 
val_accuracy: 0.7247 - 2s/epoch - 7ms/step Epoch 30/100 283/283 - 2s - loss: 1.2211 - accuracy: 0.7128 - val_loss: 1.3938 - val_accuracy: 0.7000 - 2s/epoch - 7ms/step Epoch 31/100 283/283 - 2s - loss: 1.2211 - accuracy: 0.7096 - val_loss: 1.2992 - val_accuracy: 0.7230 - 2s/epoch - 7ms/step Epoch 32/100 283/283 - 2s - loss: 1.2341 - accuracy: 0.7106 - val_loss: 1.3767 - val_accuracy: 0.7040 - 2s/epoch - 7ms/step Epoch 33/100 283/283 - 2s - loss: 1.2164 - accuracy: 0.7193 - val_loss: 1.2766 - val_accuracy: 0.7310 - 2s/epoch - 7ms/step Epoch 34/100 283/283 - 2s - loss: 1.2291 - accuracy: 0.7201 - val_loss: 1.2923 - val_accuracy: 0.7310 - 2s/epoch - 7ms/step Epoch 35/100 283/283 - 2s - loss: 1.2125 - accuracy: 0.7240 - val_loss: 1.3019 - val_accuracy: 0.7247 - 2s/epoch - 7ms/step Epoch 36/100 283/283 - 2s - loss: 1.1558 - accuracy: 0.7400 - val_loss: 1.2375 - val_accuracy: 0.7390 - 2s/epoch - 7ms/step Epoch 37/100 283/283 - 2s - loss: 1.1755 - accuracy: 0.7262 - val_loss: 1.3448 - val_accuracy: 0.7087 - 2s/epoch - 7ms/step Epoch 38/100 283/283 - 2s - loss: 1.1336 - accuracy: 0.7416 - val_loss: 1.3107 - val_accuracy: 0.7307 - 2s/epoch - 7ms/step Epoch 39/100 283/283 - 2s - loss: 1.1212 - accuracy: 0.7475 - val_loss: 1.4536 - val_accuracy: 0.7073 - 2s/epoch - 7ms/step Epoch 40/100 283/283 - 2s - loss: 1.1303 - accuracy: 0.7510 - val_loss: 1.4158 - val_accuracy: 0.7000 - 2s/epoch - 7ms/step Epoch 41/100 283/283 - 2s - loss: 1.1674 - accuracy: 0.7418 - val_loss: 1.2782 - val_accuracy: 0.7310 - 2s/epoch - 7ms/step Epoch 42/100 283/283 - 2s - loss: 1.1271 - accuracy: 0.7533 - val_loss: 1.2449 - val_accuracy: 0.7413 - 2s/epoch - 7ms/step Epoch 43/100 283/283 - 2s - loss: 1.1372 - accuracy: 0.7466 - val_loss: 1.3069 - val_accuracy: 0.7287 - 2s/epoch - 7ms/step Epoch 44/100 283/283 - 2s - loss: 1.1448 - accuracy: 0.7531 - val_loss: 1.2168 - val_accuracy: 0.7553 - 2s/epoch - 7ms/step Epoch 45/100 283/283 - 2s - loss: 1.1347 - accuracy: 0.7481 - val_loss: 1.3444 - val_accuracy: 0.7267 - 2s/epoch - 7ms/step Epoch 46/100 283/283 - 2s - loss: 1.1125 - accuracy: 0.7589 - val_loss: 1.3016 - val_accuracy: 0.7397 - 2s/epoch - 7ms/step Epoch 47/100 283/283 - 2s - loss: 1.1062 - accuracy: 0.7646 - val_loss: 1.3437 - val_accuracy: 0.7133 - 2s/epoch - 7ms/step Epoch 48/100 283/283 - 2s - loss: 1.0991 - accuracy: 0.7621 - val_loss: 1.2747 - val_accuracy: 0.7430 - 2s/epoch - 7ms/step Epoch 49/100 283/283 - 2s - loss: 1.1294 - accuracy: 0.7579 - val_loss: 1.3053 - val_accuracy: 0.7337 - 2s/epoch - 7ms/step Epoch 50/100 283/283 - 2s - loss: 1.0958 - accuracy: 0.7645 - val_loss: 1.3442 - val_accuracy: 0.7237 - 2s/epoch - 7ms/step Epoch 51/100 283/283 - 2s - loss: 1.1110 - accuracy: 0.7643 - val_loss: 1.2870 - val_accuracy: 0.7357 - 2s/epoch - 7ms/step Epoch 52/100 283/283 - 2s - loss: 1.1106 - accuracy: 0.7644 - val_loss: 1.2795 - val_accuracy: 0.7377 - 2s/epoch - 7ms/step Epoch 53/100 283/283 - 2s - loss: 1.0574 - accuracy: 0.7812 - val_loss: 1.3595 - val_accuracy: 0.7197 - 2s/epoch - 7ms/step Epoch 54/100 283/283 - 2s - loss: 1.0900 - accuracy: 0.7737 - val_loss: 1.2612 - val_accuracy: 0.7460 - 2s/epoch - 7ms/step Epoch 55/100 283/283 - 2s - loss: 1.1099 - accuracy: 0.7674 - val_loss: 1.2790 - val_accuracy: 0.7403 - 2s/epoch - 7ms/step Epoch 56/100 283/283 - 2s - loss: 1.0830 - accuracy: 0.7765 - val_loss: 1.2839 - val_accuracy: 0.7470 - 2s/epoch - 7ms/step Epoch 57/100 283/283 - 2s - loss: 1.0845 - accuracy: 0.7726 - val_loss: 1.3319 - val_accuracy: 0.7243 - 2s/epoch - 7ms/step Epoch 58/100 283/283 - 2s - loss: 
1.0325 - accuracy: 0.7950 - val_loss: 1.4014 - val_accuracy: 0.6960 - 2s/epoch - 7ms/step Epoch 59/100 283/283 - 2s - loss: 1.0385 - accuracy: 0.7887 - val_loss: 1.3204 - val_accuracy: 0.7287 - 2s/epoch - 7ms/step Epoch 60/100 283/283 - 2s - loss: 1.0746 - accuracy: 0.7739 - val_loss: 1.2846 - val_accuracy: 0.7350 - 2s/epoch - 7ms/step Epoch 61/100 283/283 - 2s - loss: 1.0243 - accuracy: 0.7925 - val_loss: 1.2439 - val_accuracy: 0.7560 - 2s/epoch - 7ms/step Epoch 62/100 283/283 - 2s - loss: 1.0192 - accuracy: 0.7926 - val_loss: 1.2250 - val_accuracy: 0.7513 - 2s/epoch - 7ms/step Epoch 63/100 283/283 - 2s - loss: 1.0753 - accuracy: 0.7860 - val_loss: 1.2827 - val_accuracy: 0.7417 - 2s/epoch - 7ms/step Epoch 64/100 283/283 - 2s - loss: 1.0093 - accuracy: 0.7982 - val_loss: 1.3686 - val_accuracy: 0.7397 - 2s/epoch - 7ms/step Epoch 65/100 283/283 - 2s - loss: 0.9781 - accuracy: 0.8024 - val_loss: 1.1733 - val_accuracy: 0.7717 - 2s/epoch - 7ms/step Epoch 66/100 283/283 - 2s - loss: 1.0329 - accuracy: 0.7818 - val_loss: 1.2330 - val_accuracy: 0.7667 - 2s/epoch - 7ms/step Epoch 67/100 283/283 - 2s - loss: 1.0508 - accuracy: 0.7861 - val_loss: 1.3475 - val_accuracy: 0.7323 - 2s/epoch - 7ms/step Epoch 68/100 283/283 - 2s - loss: 1.0528 - accuracy: 0.7920 - val_loss: 1.3012 - val_accuracy: 0.7377 - 2s/epoch - 7ms/step Epoch 69/100 283/283 - 2s - loss: 1.0302 - accuracy: 0.7940 - val_loss: 1.2168 - val_accuracy: 0.7567 - 2s/epoch - 7ms/step Epoch 70/100 283/283 - 2s - loss: 1.0356 - accuracy: 0.7967 - val_loss: 1.3727 - val_accuracy: 0.7350 - 2s/epoch - 7ms/step Epoch 71/100 283/283 - 2s - loss: 1.0024 - accuracy: 0.8048 - val_loss: 1.2964 - val_accuracy: 0.7567 - 2s/epoch - 7ms/step Epoch 72/100 283/283 - 2s - loss: 1.0288 - accuracy: 0.7980 - val_loss: 1.2903 - val_accuracy: 0.7480 - 2s/epoch - 7ms/step Epoch 73/100 283/283 - 2s - loss: 1.0176 - accuracy: 0.8010 - val_loss: 1.3121 - val_accuracy: 0.7463 - 2s/epoch - 7ms/step Epoch 74/100 283/283 - 2s - loss: 1.0200 - accuracy: 0.7997 - val_loss: 1.2900 - val_accuracy: 0.7390 - 2s/epoch - 7ms/step Epoch 75/100 283/283 - 2s - loss: 1.0054 - accuracy: 0.8075 - val_loss: 1.3499 - val_accuracy: 0.7280 - 2s/epoch - 7ms/step Epoch 76/100 283/283 - 2s - loss: 0.9677 - accuracy: 0.8179 - val_loss: 1.2507 - val_accuracy: 0.7623 - 2s/epoch - 8ms/step Epoch 77/100 283/283 - 2s - loss: 1.0181 - accuracy: 0.7996 - val_loss: 1.2429 - val_accuracy: 0.7630 - 2s/epoch - 7ms/step Epoch 78/100 283/283 - 2s - loss: 0.9746 - accuracy: 0.8142 - val_loss: 1.2015 - val_accuracy: 0.7850 - 2s/epoch - 7ms/step Epoch 79/100 283/283 - 2s - loss: 0.9677 - accuracy: 0.8138 - val_loss: 1.1978 - val_accuracy: 0.7717 - 2s/epoch - 7ms/step Epoch 80/100 283/283 - 2s - loss: 0.9753 - accuracy: 0.8111 - val_loss: 1.1857 - val_accuracy: 0.7797 - 2s/epoch - 7ms/step Epoch 81/100 283/283 - 2s - loss: 1.0129 - accuracy: 0.8093 - val_loss: 1.2875 - val_accuracy: 0.7507 - 2s/epoch - 7ms/step Epoch 82/100 283/283 - 2s - loss: 0.9563 - accuracy: 0.8227 - val_loss: 1.1861 - val_accuracy: 0.7800 - 2s/epoch - 7ms/step Epoch 83/100 283/283 - 2s - loss: 0.9591 - accuracy: 0.8195 - val_loss: 1.2460 - val_accuracy: 0.7643 - 2s/epoch - 7ms/step Epoch 84/100 283/283 - 2s - loss: 0.9772 - accuracy: 0.8229 - val_loss: 1.1985 - val_accuracy: 0.7690 - 2s/epoch - 7ms/step Epoch 85/100 283/283 - 2s - loss: 0.9291 - accuracy: 0.8289 - val_loss: 1.1762 - val_accuracy: 0.7747 - 2s/epoch - 7ms/step Epoch 86/100 283/283 - 2s - loss: 0.9829 - accuracy: 0.8200 - val_loss: 1.2546 - val_accuracy: 0.7577 - 2s/epoch - 
7ms/step Epoch 87/100 283/283 - 2s - loss: 0.9719 - accuracy: 0.8209 - val_loss: 1.2163 - val_accuracy: 0.7710 - 2s/epoch - 7ms/step Epoch 88/100 283/283 - 2s - loss: 0.9678 - accuracy: 0.8257 - val_loss: 1.1792 - val_accuracy: 0.7673 - 2s/epoch - 7ms/step Epoch 89/100 283/283 - 2s - loss: 0.9723 - accuracy: 0.8171 - val_loss: 1.2565 - val_accuracy: 0.7567 - 2s/epoch - 7ms/step Epoch 90/100 283/283 - 2s - loss: 0.9637 - accuracy: 0.8207 - val_loss: 1.4324 - val_accuracy: 0.7133 - 2s/epoch - 7ms/step Epoch 91/100 283/283 - 2s - loss: 0.9565 - accuracy: 0.8213 - val_loss: 1.3810 - val_accuracy: 0.7383 - 2s/epoch - 7ms/step Epoch 92/100 283/283 - 2s - loss: 0.9429 - accuracy: 0.8295 - val_loss: 1.2372 - val_accuracy: 0.7737 - 2s/epoch - 7ms/step Epoch 93/100 283/283 - 2s - loss: 0.9665 - accuracy: 0.8237 - val_loss: 1.1843 - val_accuracy: 0.7800 - 2s/epoch - 7ms/step Epoch 94/100 283/283 - 2s - loss: 0.9309 - accuracy: 0.8303 - val_loss: 1.1748 - val_accuracy: 0.7833 - 2s/epoch - 7ms/step Epoch 95/100 283/283 - 2s - loss: 0.9385 - accuracy: 0.8299 - val_loss: 1.1716 - val_accuracy: 0.7843 - 2s/epoch - 7ms/step Epoch 96/100 283/283 - 2s - loss: 0.9225 - accuracy: 0.8377 - val_loss: 1.1325 - val_accuracy: 0.7893 - 2s/epoch - 7ms/step Epoch 97/100 283/283 - 2s - loss: 0.9380 - accuracy: 0.8262 - val_loss: 1.2151 - val_accuracy: 0.7713 - 2s/epoch - 7ms/step Epoch 98/100 283/283 - 2s - loss: 0.9294 - accuracy: 0.8312 - val_loss: 1.2327 - val_accuracy: 0.7673 - 2s/epoch - 7ms/step Epoch 99/100 283/283 - 2s - loss: 0.9403 - accuracy: 0.8299 - val_loss: 1.1838 - val_accuracy: 0.7850 - 2s/epoch - 7ms/step Epoch 100/100 283/283 - 2s - loss: 0.9208 - accuracy: 0.8353 - val_loss: 1.2471 - val_accuracy: 0.7660 - 2s/epoch - 7ms/step 94/94 - 0s - loss: 1.2471 - accuracy: 0.7660 - 302ms/epoch - 3ms/step Baseline Error: 23.40%
We can see that the model still seems to be overfitting and there are still some fluctuations. We should first try to stabilize the model by adding more regularization layers or through other means such as image augmentation or batch normalization. As the model training is also slightly slow, applying batch normalization seems like a good idea: it should help the model converge faster while also making it more stable, which solves two of my problems at once.
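For reference, batch normalization is most commonly inserted after the convolutional layers rather than just before the output layer, which is where the model below places it. The snippet is only a hedged illustration of that more typical placement and is not the configuration trained in this notebook.
#hypothetical placement: BatchNormalization directly after a conv layer, before pooling
from keras import layers, models
conv_bn_block = models.Sequential([
layers.Conv2D(32, (3, 3), activation='relu', input_shape=(128, 128, 1)),
layers.BatchNormalization(), #normalize the conv activations
layers.MaxPooling2D((2, 2)),
])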
model_128BN = models.Sequential([
layers.Conv2D(32, (3, 3), activation='relu', input_shape=(128, 128, 1)),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.Flatten(),
layers.Dense(64, activation='relu', kernel_regularizer=regularizers.l2(0.001)),
layers.Dropout(0.5),
layers.Dense(32, activation='relu'),
layers.BatchNormalization(), #add batch normalization
layers.Dense(15, activation='softmax')
])
model_128BN.compile(optimizer='adam',
loss= 'categorical_crossentropy',
metrics=['accuracy'])
history = model_128BN.fit(X_train128, train_labels, validation_data=(X_val128, validation_labels),
epochs=100, batch_size=32, verbose=2, class_weight = class_weight)
scores = model_128BN.evaluate(X_val128, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
df128.loc[len(df128)] = ['BNModel', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
Epoch 1/100 283/283 - 5s - loss: 3.4392 - accuracy: 0.1115 - val_loss: 2.7890 - val_accuracy: 0.0853 - 5s/epoch - 18ms/step Epoch 2/100 283/283 - 2s - loss: 2.9918 - accuracy: 0.1653 - val_loss: 2.9038 - val_accuracy: 0.1007 - 2s/epoch - 8ms/step Epoch 3/100 283/283 - 2s - loss: 2.7965 - accuracy: 0.2067 - val_loss: 2.8479 - val_accuracy: 0.1143 - 2s/epoch - 8ms/step Epoch 4/100 283/283 - 2s - loss: 2.6494 - accuracy: 0.2547 - val_loss: 2.7457 - val_accuracy: 0.1447 - 2s/epoch - 8ms/step Epoch 5/100 283/283 - 2s - loss: 2.5035 - accuracy: 0.2994 - val_loss: 2.5735 - val_accuracy: 0.2213 - 2s/epoch - 8ms/step Epoch 6/100 283/283 - 2s - loss: 2.5430 - accuracy: 0.2953 - val_loss: 2.3594 - val_accuracy: 0.3077 - 2s/epoch - 8ms/step Epoch 7/100 283/283 - 2s - loss: 2.3426 - accuracy: 0.3477 - val_loss: 2.4077 - val_accuracy: 0.2917 - 2s/epoch - 8ms/step Epoch 8/100 283/283 - 2s - loss: 2.3587 - accuracy: 0.3510 - val_loss: 2.1295 - val_accuracy: 0.3817 - 2s/epoch - 8ms/step Epoch 9/100 283/283 - 2s - loss: 2.1896 - accuracy: 0.3969 - val_loss: 2.0155 - val_accuracy: 0.4313 - 2s/epoch - 8ms/step Epoch 10/100 283/283 - 2s - loss: 2.1031 - accuracy: 0.4228 - val_loss: 1.8990 - val_accuracy: 0.4683 - 2s/epoch - 8ms/step Epoch 11/100 283/283 - 2s - loss: 2.0431 - accuracy: 0.4435 - val_loss: 1.8384 - val_accuracy: 0.5103 - 2s/epoch - 8ms/step Epoch 12/100 283/283 - 2s - loss: 1.9108 - accuracy: 0.4745 - val_loss: 3.8159 - val_accuracy: 0.1300 - 2s/epoch - 8ms/step Epoch 13/100 283/283 - 2s - loss: 2.7060 - accuracy: 0.3086 - val_loss: 2.1527 - val_accuracy: 0.3710 - 2s/epoch - 8ms/step Epoch 14/100 283/283 - 2s - loss: 1.8885 - accuracy: 0.4796 - val_loss: 1.7734 - val_accuracy: 0.5163 - 2s/epoch - 8ms/step Epoch 15/100 283/283 - 2s - loss: 1.7798 - accuracy: 0.5183 - val_loss: 1.6346 - val_accuracy: 0.5720 - 2s/epoch - 8ms/step Epoch 16/100 283/283 - 2s - loss: 1.7144 - accuracy: 0.5402 - val_loss: 1.6883 - val_accuracy: 0.5520 - 2s/epoch - 8ms/step Epoch 17/100 283/283 - 2s - loss: 1.6559 - accuracy: 0.5575 - val_loss: 1.9102 - val_accuracy: 0.4860 - 2s/epoch - 8ms/step Epoch 18/100 283/283 - 2s - loss: 1.9823 - accuracy: 0.4983 - val_loss: 1.7499 - val_accuracy: 0.5483 - 2s/epoch - 7ms/step Epoch 19/100 283/283 - 2s - loss: 1.7148 - accuracy: 0.5579 - val_loss: 1.6632 - val_accuracy: 0.5790 - 2s/epoch - 7ms/step Epoch 20/100 283/283 - 2s - loss: 1.5819 - accuracy: 0.5920 - val_loss: 1.6773 - val_accuracy: 0.5733 - 2s/epoch - 8ms/step Epoch 21/100 283/283 - 2s - loss: 1.5613 - accuracy: 0.5974 - val_loss: 1.5473 - val_accuracy: 0.6247 - 2s/epoch - 8ms/step Epoch 22/100 283/283 - 2s - loss: 1.5153 - accuracy: 0.6182 - val_loss: 1.4335 - val_accuracy: 0.6660 - 2s/epoch - 8ms/step Epoch 23/100 283/283 - 2s - loss: 1.4870 - accuracy: 0.6348 - val_loss: 1.7947 - val_accuracy: 0.5643 - 2s/epoch - 8ms/step Epoch 24/100 283/283 - 2s - loss: 1.4552 - accuracy: 0.6461 - val_loss: 1.4867 - val_accuracy: 0.6513 - 2s/epoch - 8ms/step Epoch 25/100 283/283 - 2s - loss: 1.4396 - accuracy: 0.6522 - val_loss: 1.4843 - val_accuracy: 0.6347 - 2s/epoch - 7ms/step Epoch 26/100 283/283 - 2s - loss: 1.3835 - accuracy: 0.6678 - val_loss: 1.5298 - val_accuracy: 0.6300 - 2s/epoch - 8ms/step Epoch 27/100 283/283 - 2s - loss: 1.4232 - accuracy: 0.6678 - val_loss: 1.5109 - val_accuracy: 0.6553 - 2s/epoch - 8ms/step Epoch 28/100 283/283 - 2s - loss: 1.3484 - accuracy: 0.6830 - val_loss: 1.3453 - val_accuracy: 0.7027 - 2s/epoch - 8ms/step Epoch 29/100 283/283 - 2s - loss: 1.3287 - accuracy: 0.6872 - val_loss: 1.3401 - 
val_accuracy: 0.6993 - 2s/epoch - 7ms/step Epoch 30/100 283/283 - 2s - loss: 1.3908 - accuracy: 0.6767 - val_loss: 1.3759 - val_accuracy: 0.6900 - 2s/epoch - 8ms/step Epoch 31/100 283/283 - 2s - loss: 1.3719 - accuracy: 0.6838 - val_loss: 1.3424 - val_accuracy: 0.7093 - 2s/epoch - 8ms/step Epoch 32/100 283/283 - 2s - loss: 1.2868 - accuracy: 0.7026 - val_loss: 1.5283 - val_accuracy: 0.6490 - 2s/epoch - 8ms/step Epoch 33/100 283/283 - 2s - loss: 1.2681 - accuracy: 0.7127 - val_loss: 1.4256 - val_accuracy: 0.6853 - 2s/epoch - 8ms/step Epoch 34/100 283/283 - 2s - loss: 1.2565 - accuracy: 0.7133 - val_loss: 1.2794 - val_accuracy: 0.7303 - 2s/epoch - 8ms/step Epoch 35/100 283/283 - 2s - loss: 1.2551 - accuracy: 0.7144 - val_loss: 1.2962 - val_accuracy: 0.7187 - 2s/epoch - 8ms/step Epoch 36/100 283/283 - 2s - loss: 1.2669 - accuracy: 0.7126 - val_loss: 1.3423 - val_accuracy: 0.6990 - 2s/epoch - 8ms/step Epoch 37/100 283/283 - 2s - loss: 1.2046 - accuracy: 0.7346 - val_loss: 1.3755 - val_accuracy: 0.6893 - 2s/epoch - 8ms/step Epoch 38/100 283/283 - 2s - loss: 1.3182 - accuracy: 0.7125 - val_loss: 1.3744 - val_accuracy: 0.6960 - 2s/epoch - 8ms/step Epoch 39/100 283/283 - 2s - loss: 1.1917 - accuracy: 0.7436 - val_loss: 1.4559 - val_accuracy: 0.6790 - 2s/epoch - 8ms/step Epoch 40/100 283/283 - 2s - loss: 1.1963 - accuracy: 0.7388 - val_loss: 1.4839 - val_accuracy: 0.6640 - 2s/epoch - 7ms/step Epoch 41/100 283/283 - 2s - loss: 1.2504 - accuracy: 0.7240 - val_loss: 1.3604 - val_accuracy: 0.6967 - 2s/epoch - 8ms/step Epoch 42/100 283/283 - 2s - loss: 1.1978 - accuracy: 0.7434 - val_loss: 1.3946 - val_accuracy: 0.6967 - 2s/epoch - 8ms/step Epoch 43/100 283/283 - 2s - loss: 1.1824 - accuracy: 0.7459 - val_loss: 1.2944 - val_accuracy: 0.7267 - 2s/epoch - 8ms/step Epoch 44/100 283/283 - 2s - loss: 1.1504 - accuracy: 0.7506 - val_loss: 1.2656 - val_accuracy: 0.7250 - 2s/epoch - 8ms/step Epoch 45/100 283/283 - 2s - loss: 1.1801 - accuracy: 0.7473 - val_loss: 1.4329 - val_accuracy: 0.6840 - 2s/epoch - 8ms/step Epoch 46/100 283/283 - 2s - loss: 1.1830 - accuracy: 0.7488 - val_loss: 1.4045 - val_accuracy: 0.6897 - 2s/epoch - 8ms/step Epoch 47/100 283/283 - 2s - loss: 1.1408 - accuracy: 0.7636 - val_loss: 1.5179 - val_accuracy: 0.6587 - 2s/epoch - 7ms/step Epoch 48/100 283/283 - 2s - loss: 1.1188 - accuracy: 0.7654 - val_loss: 1.3120 - val_accuracy: 0.7157 - 2s/epoch - 8ms/step Epoch 49/100 283/283 - 2s - loss: 1.1039 - accuracy: 0.7698 - val_loss: 1.2970 - val_accuracy: 0.7230 - 2s/epoch - 8ms/step Epoch 50/100 283/283 - 2s - loss: 1.1265 - accuracy: 0.7634 - val_loss: 1.3948 - val_accuracy: 0.6977 - 2s/epoch - 8ms/step Epoch 51/100 283/283 - 2s - loss: 1.1139 - accuracy: 0.7668 - val_loss: 1.3010 - val_accuracy: 0.7227 - 2s/epoch - 8ms/step Epoch 52/100 283/283 - 2s - loss: 1.0966 - accuracy: 0.7750 - val_loss: 1.4001 - val_accuracy: 0.7070 - 2s/epoch - 8ms/step Epoch 53/100 283/283 - 2s - loss: 1.1044 - accuracy: 0.7734 - val_loss: 1.2972 - val_accuracy: 0.7243 - 2s/epoch - 8ms/step Epoch 54/100 283/283 - 2s - loss: 1.0858 - accuracy: 0.7760 - val_loss: 1.2901 - val_accuracy: 0.7247 - 2s/epoch - 8ms/step Epoch 55/100 283/283 - 2s - loss: 1.1199 - accuracy: 0.7655 - val_loss: 1.3006 - val_accuracy: 0.7203 - 2s/epoch - 8ms/step Epoch 56/100 283/283 - 2s - loss: 1.0705 - accuracy: 0.7792 - val_loss: 1.4242 - val_accuracy: 0.6830 - 2s/epoch - 8ms/step Epoch 57/100 283/283 - 2s - loss: 1.0561 - accuracy: 0.7905 - val_loss: 1.3399 - val_accuracy: 0.7137 - 2s/epoch - 8ms/step Epoch 58/100 283/283 - 2s - loss: 
1.0536 - accuracy: 0.7879 - val_loss: 1.3857 - val_accuracy: 0.7037 - 2s/epoch - 8ms/step Epoch 59/100 283/283 - 2s - loss: 1.0456 - accuracy: 0.7936 - val_loss: 1.4647 - val_accuracy: 0.6850 - 2s/epoch - 8ms/step Epoch 60/100 283/283 - 2s - loss: 1.0817 - accuracy: 0.7781 - val_loss: 1.4635 - val_accuracy: 0.6890 - 2s/epoch - 8ms/step Epoch 61/100 283/283 - 2s - loss: 1.0725 - accuracy: 0.7852 - val_loss: 1.3276 - val_accuracy: 0.7200 - 2s/epoch - 8ms/step Epoch 62/100 283/283 - 2s - loss: 1.0565 - accuracy: 0.7844 - val_loss: 1.4217 - val_accuracy: 0.6873 - 2s/epoch - 8ms/step Epoch 63/100 283/283 - 2s - loss: 1.0297 - accuracy: 0.7980 - val_loss: 1.3568 - val_accuracy: 0.7090 - 2s/epoch - 8ms/step Epoch 64/100 283/283 - 2s - loss: 1.0931 - accuracy: 0.7775 - val_loss: 1.3791 - val_accuracy: 0.7090 - 2s/epoch - 7ms/step Epoch 65/100 283/283 - 2s - loss: 1.0986 - accuracy: 0.7800 - val_loss: 1.3623 - val_accuracy: 0.7177 - 2s/epoch - 7ms/step Epoch 66/100 283/283 - 2s - loss: 1.0409 - accuracy: 0.7966 - val_loss: 1.5560 - val_accuracy: 0.6713 - 2s/epoch - 7ms/step Epoch 67/100 283/283 - 2s - loss: 1.0493 - accuracy: 0.7918 - val_loss: 1.3573 - val_accuracy: 0.7170 - 2s/epoch - 7ms/step Epoch 68/100 283/283 - 2s - loss: 1.0252 - accuracy: 0.8041 - val_loss: 1.3173 - val_accuracy: 0.7303 - 2s/epoch - 7ms/step Epoch 69/100 283/283 - 2s - loss: 0.9857 - accuracy: 0.8064 - val_loss: 1.3156 - val_accuracy: 0.7190 - 2s/epoch - 7ms/step Epoch 70/100 283/283 - 2s - loss: 1.0402 - accuracy: 0.7963 - val_loss: 1.4080 - val_accuracy: 0.7090 - 2s/epoch - 8ms/step Epoch 71/100 283/283 - 2s - loss: 1.0307 - accuracy: 0.8028 - val_loss: 1.4405 - val_accuracy: 0.6940 - 2s/epoch - 8ms/step Epoch 72/100 283/283 - 2s - loss: 1.0050 - accuracy: 0.8070 - val_loss: 1.3891 - val_accuracy: 0.6977 - 2s/epoch - 8ms/step Epoch 73/100 283/283 - 2s - loss: 1.0434 - accuracy: 0.7985 - val_loss: 1.3774 - val_accuracy: 0.7183 - 2s/epoch - 7ms/step Epoch 74/100 283/283 - 2s - loss: 1.0326 - accuracy: 0.7909 - val_loss: 1.3687 - val_accuracy: 0.7177 - 2s/epoch - 8ms/step Epoch 75/100 283/283 - 2s - loss: 1.0077 - accuracy: 0.8055 - val_loss: 1.7147 - val_accuracy: 0.6247 - 2s/epoch - 7ms/step Epoch 76/100 283/283 - 2s - loss: 0.9917 - accuracy: 0.8130 - val_loss: 1.5604 - val_accuracy: 0.6767 - 2s/epoch - 7ms/step Epoch 77/100 283/283 - 2s - loss: 1.0537 - accuracy: 0.7979 - val_loss: 1.4223 - val_accuracy: 0.7027 - 2s/epoch - 8ms/step Epoch 78/100 283/283 - 2s - loss: 0.9821 - accuracy: 0.8138 - val_loss: 1.6633 - val_accuracy: 0.6587 - 2s/epoch - 8ms/step Epoch 79/100 283/283 - 2s - loss: 1.1394 - accuracy: 0.7798 - val_loss: 1.3772 - val_accuracy: 0.7200 - 2s/epoch - 8ms/step Epoch 80/100 283/283 - 2s - loss: 1.0464 - accuracy: 0.8017 - val_loss: 1.4488 - val_accuracy: 0.6910 - 2s/epoch - 7ms/step Epoch 81/100 283/283 - 2s - loss: 0.9613 - accuracy: 0.8161 - val_loss: 1.5798 - val_accuracy: 0.6670 - 2s/epoch - 8ms/step Epoch 82/100 283/283 - 2s - loss: 0.9806 - accuracy: 0.8201 - val_loss: 1.3902 - val_accuracy: 0.7137 - 2s/epoch - 8ms/step Epoch 83/100 283/283 - 2s - loss: 1.0113 - accuracy: 0.8129 - val_loss: 1.4386 - val_accuracy: 0.6937 - 2s/epoch - 8ms/step Epoch 84/100 283/283 - 2s - loss: 1.0360 - accuracy: 0.8073 - val_loss: 1.5480 - val_accuracy: 0.6747 - 2s/epoch - 8ms/step Epoch 85/100 283/283 - 2s - loss: 0.9516 - accuracy: 0.8330 - val_loss: 1.3873 - val_accuracy: 0.7257 - 2s/epoch - 8ms/step Epoch 86/100 283/283 - 2s - loss: 0.9885 - accuracy: 0.8158 - val_loss: 1.4499 - val_accuracy: 0.6993 - 2s/epoch - 
8ms/step Epoch 87/100 283/283 - 2s - loss: 1.0081 - accuracy: 0.8114 - val_loss: 1.4305 - val_accuracy: 0.7093 - 2s/epoch - 7ms/step Epoch 88/100 283/283 - 2s - loss: 0.9829 - accuracy: 0.8190 - val_loss: 1.4197 - val_accuracy: 0.7047 - 2s/epoch - 8ms/step Epoch 89/100 283/283 - 2s - loss: 0.9615 - accuracy: 0.8222 - val_loss: 1.4087 - val_accuracy: 0.7107 - 2s/epoch - 7ms/step Epoch 90/100 283/283 - 2s - loss: 0.9623 - accuracy: 0.8178 - val_loss: 1.4780 - val_accuracy: 0.6923 - 2s/epoch - 7ms/step Epoch 91/100 283/283 - 2s - loss: 0.9611 - accuracy: 0.8158 - val_loss: 1.4663 - val_accuracy: 0.6953 - 2s/epoch - 8ms/step Epoch 92/100 283/283 - 2s - loss: 0.9713 - accuracy: 0.8158 - val_loss: 1.4091 - val_accuracy: 0.7097 - 2s/epoch - 8ms/step Epoch 93/100 283/283 - 2s - loss: 0.9364 - accuracy: 0.8327 - val_loss: 1.4059 - val_accuracy: 0.7103 - 2s/epoch - 8ms/step Epoch 94/100 283/283 - 2s - loss: 0.9288 - accuracy: 0.8269 - val_loss: 1.4002 - val_accuracy: 0.7100 - 2s/epoch - 8ms/step Epoch 95/100 283/283 - 2s - loss: 0.9441 - accuracy: 0.8247 - val_loss: 1.4027 - val_accuracy: 0.7080 - 2s/epoch - 8ms/step Epoch 96/100 283/283 - 2s - loss: 0.9350 - accuracy: 0.8324 - val_loss: 1.4092 - val_accuracy: 0.7070 - 2s/epoch - 8ms/step Epoch 97/100 283/283 - 2s - loss: 0.9386 - accuracy: 0.8281 - val_loss: 1.5582 - val_accuracy: 0.6783 - 2s/epoch - 8ms/step Epoch 98/100 283/283 - 2s - loss: 0.9675 - accuracy: 0.8200 - val_loss: 1.5581 - val_accuracy: 0.6703 - 2s/epoch - 8ms/step Epoch 99/100 283/283 - 2s - loss: 0.9214 - accuracy: 0.8335 - val_loss: 1.4141 - val_accuracy: 0.7143 - 2s/epoch - 8ms/step Epoch 100/100 283/283 - 2s - loss: 0.9467 - accuracy: 0.8261 - val_loss: 1.3638 - val_accuracy: 0.7377 - 2s/epoch - 8ms/step 94/94 - 0s - loss: 1.3638 - accuracy: 0.7377 - 304ms/epoch - 3ms/step Baseline Error: 26.23%
The curves look slightly better compared to the previous run; however, the validation accuracy is still plateauing at around 70%. To address this, we can try learning-rate decay so the model can keep learning with smaller, more stable updates, hopefully leading to better results.
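The model below implements the decay with ReduceLROnPlateau, which only lowers the learning rate when the validation loss stops improving. For comparison, a fixed schedule could be written with the LearningRateScheduler callback; this is just a hedged sketch of that alternative, not the callback used below.
#hedged alternative to ReduceLROnPlateau: a fixed exponential decay of the learning rate
from keras.callbacks import LearningRateScheduler
def exp_decay(epoch, lr):
    #keep the initial rate for the first 10 epochs, then shrink it by ~4% per epoch
    return lr if epoch < 10 else lr * 0.96
lr_schedule = LearningRateScheduler(exp_decay, verbose=0)
#would be passed to model.fit(..., callbacks=[lr_schedule]) instead of ReduceLROnPlateau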
model_128BNlrd = models.Sequential([
layers.Conv2D(32, (3, 3), activation='relu', input_shape=(128, 128, 1)),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.Flatten(),
layers.Dense(64, activation='relu', kernel_regularizer=regularizers.l2(0.001)),
layers.Dropout(0.5),
layers.Dense(32, activation='relu'),
layers.BatchNormalization(),
layers.Dense(15, activation='softmax')
])
model_128BNlrd.compile(optimizer='adam',
loss= 'categorical_crossentropy',
metrics=['accuracy'])
history = model_128BNlrd.fit(X_train128, train_labels, validation_data=(X_val128, validation_labels),
epochs=100, batch_size=32, verbose=2, class_weight = class_weight,
callbacks=[ReduceLROnPlateau(monitor='val_loss', factor=0.8, patience=10, verbose=1, mode='auto', min_delta=0.0001, cooldown=0, min_lr=0)]) #lr decay
scores = model_128BNlrd.evaluate(X_val128, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
df128.loc[len(df128)] = ['BNlrdModel', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
Epoch 1/100 283/283 - 5s - loss: 3.4319 - accuracy: 0.1107 - val_loss: 2.8222 - val_accuracy: 0.0843 - lr: 0.0010 - 5s/epoch - 18ms/step
...
Epoch 50/100 283/283 - 2s - loss: 1.2102 - accuracy: 0.7466 - val_loss: 1.3186 - val_accuracy: 0.7223 - lr: 0.0010 - 2s/epoch - 8ms/step
...
Epoch 78: ReduceLROnPlateau reducing learning rate to 0.000800000037997961.
...
Epoch 92: ReduceLROnPlateau reducing learning rate to 0.0006400000303983689.
...
Epoch 100/100 283/283 - 2s - loss: 0.8170 - accuracy: 0.8440 - val_loss: 1.1840 - val_accuracy: 0.7473 - lr: 6.4000e-04 - 2s/epoch - 8ms/step
94/94 - 0s - loss: 1.1840 - accuracy: 0.7473 - 310ms/epoch - 3ms/step
Baseline Error: 25.27%
From the graph, we can see that the model has stabilized considerably; now we just need to add some regularization layers to address the overfitting issue.
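For context on the layers added below: kernel_regularizer=regularizers.l2(0.001) adds an L2 weight-decay penalty to the training loss, while Dropout(0.5) randomly zeroes half of the activations during training. A minimal sketch (illustrative only, not part of the notebook's pipeline) of what the L2 term contributes per layer:

import numpy as np

# Illustrative only: the penalty that regularizers.l2(0.001) adds for a layer
# is 0.001 * sum(w^2) over that layer's kernel weights.
def l2_penalty(kernel_weights, l2=0.001):
    return l2 * np.sum(np.square(kernel_weights))

w = np.array([[0.5, -0.2], [0.1, 0.3]])  # toy kernel
print(l2_penalty(w))                      # 0.001 * 0.39 = 0.00039

Keras adds these per-layer penalties to the cross-entropy loss it reports, which is one reason the training loss printed during fitting sits slightly above the raw cross-entropy.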
model_128BNlrdReg = models.Sequential([
    layers.Conv2D(32, (3, 3), activation='relu', input_shape=(128, 128, 1)),
    layers.MaxPooling2D((2, 2)),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.MaxPooling2D((2, 2)),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.Dropout(0.5),
    layers.Flatten(),
    layers.Dense(64, activation='relu', kernel_regularizer=regularizers.l2(0.001)),
    layers.Dropout(0.5),
    layers.Dense(32, activation='relu'),
    layers.BatchNormalization(),
    layers.Dense(15, activation='softmax')
])
model_128BNlrdReg.compile(optimizer='adam',
                          loss='categorical_crossentropy',
                          metrics=['accuracy'])
history = model_128BNlrdReg.fit(X_train128, train_labels, validation_data=(X_val128, validation_labels),
                                epochs=100, batch_size=32, verbose=2, class_weight=class_weight,
                                callbacks=[ReduceLROnPlateau(monitor='val_loss', factor=0.8, patience=10, verbose=1,
                                                             mode='auto', min_delta=0.0001, cooldown=0, min_lr=0)])  # lr decay
scores = model_128BNlrdReg.evaluate(X_val128, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100 - scores[1] * 100))
plot_history(history)
df128.loc[len(df128)] = ['BNlrdRegModel', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
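The ReduceLROnPlateau callback used above multiplies the learning rate by factor=0.8 each time val_loss fails to improve for patience=10 epochs, which is where the 1e-3 → 8e-4 → 6.4e-4 steps in the log below come from. A small sketch (illustrative only) of the resulting geometric schedule:

# Illustrative only: each plateau multiplies the learning rate by 0.8.
lr = 1e-3
for n_reductions in range(4):
    print(f"after {n_reductions} reduction(s): lr = {lr:.2e}")
    lr *= 0.8
# after 0 reduction(s): lr = 1.00e-03
# after 1 reduction(s): lr = 8.00e-04
# after 2 reduction(s): lr = 6.40e-04
# after 3 reduction(s): lr = 5.12e-04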
Epoch 1/100 283/283 - 6s - loss: 3.5422 - accuracy: 0.0886 - val_loss: 2.8525 - val_accuracy: 0.0663 - lr: 0.0010 - 6s/epoch - 20ms/step
...
Epoch 50/100 283/283 - 2s - loss: 1.3367 - accuracy: 0.7140 - val_loss: 1.1540 - val_accuracy: 0.7867 - lr: 0.0010 - 2s/epoch - 9ms/step
...
Epoch 60: ReduceLROnPlateau reducing learning rate to 0.000800000037997961.
...
Epoch 94: ReduceLROnPlateau reducing learning rate to 0.0006400000303983689.
...
Epoch 100/100 283/283 - 2s - loss: 0.9465 - accuracy: 0.8186 - val_loss: 0.8594 - val_accuracy: 0.8523 - lr: 6.4000e-04 - 2s/epoch - 9ms/step
94/94 - 0s - loss: 0.8594 - accuracy: 0.8523 - 305ms/epoch - 3ms/step
Baseline Error: 14.77%
We can see that the model performs much better now: there is far less overfitting, the curves are less jittery, and the validation accuracy reaches about 85% (a baseline error of 14.77%). The slight jitters that remain are likely caused by the learning rate, but since I will not be doing any hyperparameter tuning, the learning rate schedule is left as is. The model also has not fully converged; given that it already performs well, I believe letting it train to convergence will result in an even better model with excellent performance.
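An alternative to simply raising the epoch count (not what is done below, just a sketch) would be to let an EarlyStopping callback decide when training has converged. The snippet assumes the same data, labels, class_weight, and a freshly compiled model as in the cell that follows:

# Sketch only: stop once val_loss has not improved for 25 epochs and restore
# the best weights seen so far. Assumes X_train128, train_labels, X_val128,
# validation_labels and class_weight exist as in the surrounding cells.
early_stop = EarlyStopping(monitor='val_loss', patience=25,
                           restore_best_weights=True, verbose=1)
history = model_128BNlrdReg.fit(
    X_train128, train_labels,
    validation_data=(X_val128, validation_labels),
    epochs=350, batch_size=32, verbose=2, class_weight=class_weight,
    callbacks=[early_stop,
               ReduceLROnPlateau(monitor='val_loss', factor=0.8, patience=10,
                                 verbose=1, min_delta=0.0001)])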
model_128BNlrdReg = models.Sequential([
    layers.Conv2D(32, (3, 3), activation='relu', input_shape=(128, 128, 1)),
    layers.MaxPooling2D((2, 2)),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.MaxPooling2D((2, 2)),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.Dropout(0.5),
    layers.Flatten(),
    layers.Dense(64, activation='relu', kernel_regularizer=regularizers.l2(0.001)),
    layers.Dropout(0.5),
    layers.Dense(32, activation='relu'),
    layers.BatchNormalization(),
    layers.Dense(15, activation='softmax')
])
model_128BNlrdReg.compile(optimizer='adam',
                          loss='categorical_crossentropy',
                          metrics=['accuracy'])
history = model_128BNlrdReg.fit(X_train128, train_labels, validation_data=(X_val128, validation_labels),
                                epochs=350, batch_size=32, verbose=2, class_weight=class_weight,  # increase epochs for guaranteed convergence
                                callbacks=[ReduceLROnPlateau(monitor='val_loss', factor=0.8, patience=10, verbose=1,
                                                             mode='auto', min_delta=0.0001, cooldown=0, min_lr=0)])  # lr decay
scores = model_128BNlrdReg.evaluate(X_val128, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100 - scores[1] * 100))
plot_history(history)
df128.loc[len(df128)] = ['BNlrdRegModelXtraepochs', history.history['accuracy'][-1], history.history['val_accuracy'][-1], history.history['loss'][-1], history.history['val_loss'][-1], history]
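For reference, plot_history and df128 are defined earlier in the notebook; a minimal stand-in for the plotting helper (the exact styling used earlier may differ) would look roughly like this:

# Sketch only: plots the training/validation curves stored in a Keras History
# object, which is what the plot_history calls above rely on.
def plot_history_sketch(history):
    fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 4))
    ax1.plot(history.history['accuracy'], label='train')
    ax1.plot(history.history['val_accuracy'], label='validation')
    ax1.set_title('Accuracy')
    ax1.legend()
    ax2.plot(history.history['loss'], label='train')
    ax2.plot(history.history['val_loss'], label='validation')
    ax2.set_title('Loss')
    ax2.legend()
    plt.show()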
Epoch 1/350 283/283 - 6s - loss: 3.0678 - accuracy: 0.1838 - val_loss: 2.7241 - val_accuracy: 0.1523 - lr: 0.0010 - 6s/epoch - 20ms/step
...
Epoch 30: ReduceLROnPlateau reducing learning rate to 0.000800000037997961.
...
Epoch 84: ReduceLROnPlateau reducing learning rate to 0.0006400000303983689.
...
Epoch 100/350 283/283 - 3s - loss: 0.9015 - accuracy: 0.8747 - val_loss: 0.9730 - val_accuracy: 0.8613 - lr: 6.4000e-04 - 3s/epoch - 9ms/step
...
Epoch 107: ReduceLROnPlateau reducing learning rate to 0.0005120000336319208.
...
Epoch 148: ReduceLROnPlateau reducing learning rate to 0.00040960004553198815.
...
Epoch 177: ReduceLROnPlateau reducing learning rate to 0.00032768002711236477.
...
Epoch 200/350 283/283 - 3s - loss: 0.5079 - accuracy: 0.9302 - val_loss: 0.7860 - val_accuracy: 0.8500 - lr: 3.2768e-04 - 3s/epoch - 9ms/step
...
Epoch 216: ReduceLROnPlateau reducing learning rate to 0.0002621440216898918.
...
Epoch 241/350
283/283 - 3s - loss: 0.4030 - accuracy: 0.9421 - val_loss: 0.5726 - val_accuracy: 0.9047 - lr: 2.6214e-04 - 3s/epoch - 9ms/step Epoch 242/350 283/283 - 3s - loss: 0.3903 - accuracy: 0.9457 - val_loss: 0.5888 - val_accuracy: 0.8963 - lr: 2.6214e-04 - 3s/epoch - 9ms/step Epoch 243/350 283/283 - 3s - loss: 0.3923 - accuracy: 0.9457 - val_loss: 0.5999 - val_accuracy: 0.8953 - lr: 2.6214e-04 - 3s/epoch - 9ms/step Epoch 244/350 283/283 - 3s - loss: 0.3877 - accuracy: 0.9479 - val_loss: 0.5885 - val_accuracy: 0.8907 - lr: 2.6214e-04 - 3s/epoch - 9ms/step Epoch 245/350 283/283 - 3s - loss: 0.3991 - accuracy: 0.9420 - val_loss: 0.6032 - val_accuracy: 0.8920 - lr: 2.6214e-04 - 3s/epoch - 9ms/step Epoch 246/350 283/283 - 3s - loss: 0.3940 - accuracy: 0.9451 - val_loss: 0.5862 - val_accuracy: 0.9000 - lr: 2.6214e-04 - 3s/epoch - 9ms/step Epoch 247/350 283/283 - 3s - loss: 0.3843 - accuracy: 0.9474 - val_loss: 0.5933 - val_accuracy: 0.8960 - lr: 2.6214e-04 - 3s/epoch - 9ms/step Epoch 248/350 283/283 - 3s - loss: 0.4161 - accuracy: 0.9411 - val_loss: 0.5760 - val_accuracy: 0.8967 - lr: 2.6214e-04 - 3s/epoch - 9ms/step Epoch 249/350 283/283 - 3s - loss: 0.3826 - accuracy: 0.9462 - val_loss: 0.5884 - val_accuracy: 0.8987 - lr: 2.6214e-04 - 3s/epoch - 9ms/step Epoch 250/350 283/283 - 3s - loss: 0.4022 - accuracy: 0.9428 - val_loss: 0.5911 - val_accuracy: 0.8993 - lr: 2.6214e-04 - 3s/epoch - 9ms/step Epoch 251/350 Epoch 251: ReduceLROnPlateau reducing learning rate to 0.00020971521735191345. 283/283 - 3s - loss: 0.3931 - accuracy: 0.9457 - val_loss: 0.6014 - val_accuracy: 0.8913 - lr: 2.6214e-04 - 3s/epoch - 9ms/step Epoch 252/350 283/283 - 3s - loss: 0.3897 - accuracy: 0.9463 - val_loss: 0.5915 - val_accuracy: 0.8993 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 253/350 283/283 - 3s - loss: 0.3750 - accuracy: 0.9499 - val_loss: 0.5947 - val_accuracy: 0.8980 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 254/350 283/283 - 3s - loss: 0.3813 - accuracy: 0.9478 - val_loss: 0.5825 - val_accuracy: 0.8957 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 255/350 283/283 - 3s - loss: 0.3704 - accuracy: 0.9509 - val_loss: 0.6035 - val_accuracy: 0.8877 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 256/350 283/283 - 3s - loss: 0.3783 - accuracy: 0.9498 - val_loss: 0.5776 - val_accuracy: 0.8957 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 257/350 283/283 - 3s - loss: 0.3610 - accuracy: 0.9526 - val_loss: 0.5764 - val_accuracy: 0.8963 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 258/350 283/283 - 3s - loss: 0.3569 - accuracy: 0.9493 - val_loss: 0.5464 - val_accuracy: 0.9013 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 259/350 283/283 - 3s - loss: 0.3630 - accuracy: 0.9496 - val_loss: 0.5610 - val_accuracy: 0.8983 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 260/350 283/283 - 3s - loss: 0.3538 - accuracy: 0.9508 - val_loss: 0.5729 - val_accuracy: 0.8977 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 261/350 283/283 - 3s - loss: 0.3617 - accuracy: 0.9477 - val_loss: 0.5907 - val_accuracy: 0.8873 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 262/350 283/283 - 3s - loss: 0.3462 - accuracy: 0.9512 - val_loss: 0.5355 - val_accuracy: 0.9037 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 263/350 283/283 - 3s - loss: 0.3443 - accuracy: 0.9498 - val_loss: 0.5990 - val_accuracy: 0.8883 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 264/350 283/283 - 3s - loss: 0.3493 - accuracy: 0.9514 - val_loss: 0.5479 - val_accuracy: 0.9017 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 265/350 283/283 - 3s - loss: 0.3513 - accuracy: 0.9494 - val_loss: 0.5350 - 
val_accuracy: 0.9063 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 266/350 283/283 - 3s - loss: 0.3392 - accuracy: 0.9536 - val_loss: 0.5752 - val_accuracy: 0.8857 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 267/350 283/283 - 3s - loss: 0.3415 - accuracy: 0.9515 - val_loss: 0.5389 - val_accuracy: 0.9020 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 268/350 283/283 - 3s - loss: 0.3505 - accuracy: 0.9492 - val_loss: 0.5789 - val_accuracy: 0.8880 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 269/350 283/283 - 3s - loss: 0.3440 - accuracy: 0.9523 - val_loss: 0.5897 - val_accuracy: 0.8880 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 270/350 283/283 - 2s - loss: 0.3383 - accuracy: 0.9498 - val_loss: 0.5244 - val_accuracy: 0.9047 - lr: 2.0972e-04 - 2s/epoch - 9ms/step Epoch 271/350 283/283 - 3s - loss: 0.3148 - accuracy: 0.9577 - val_loss: 0.5496 - val_accuracy: 0.8977 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 272/350 283/283 - 2s - loss: 0.3465 - accuracy: 0.9527 - val_loss: 0.5509 - val_accuracy: 0.9013 - lr: 2.0972e-04 - 2s/epoch - 9ms/step Epoch 273/350 283/283 - 3s - loss: 0.3355 - accuracy: 0.9528 - val_loss: 0.5721 - val_accuracy: 0.8910 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 274/350 283/283 - 3s - loss: 0.3376 - accuracy: 0.9488 - val_loss: 0.5510 - val_accuracy: 0.8963 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 275/350 283/283 - 3s - loss: 0.3282 - accuracy: 0.9545 - val_loss: 0.5325 - val_accuracy: 0.9043 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 276/350 283/283 - 3s - loss: 0.3412 - accuracy: 0.9498 - val_loss: 0.5607 - val_accuracy: 0.8993 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 277/350 283/283 - 3s - loss: 0.3245 - accuracy: 0.9554 - val_loss: 0.5634 - val_accuracy: 0.8917 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 278/350 283/283 - 3s - loss: 0.3146 - accuracy: 0.9609 - val_loss: 0.5507 - val_accuracy: 0.9003 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 279/350 283/283 - 3s - loss: 0.3269 - accuracy: 0.9538 - val_loss: 0.5765 - val_accuracy: 0.8913 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 280/350 Epoch 280: ReduceLROnPlateau reducing learning rate to 0.00016777217388153076. 
283/283 - 3s - loss: 0.3271 - accuracy: 0.9540 - val_loss: 0.5475 - val_accuracy: 0.8957 - lr: 2.0972e-04 - 3s/epoch - 9ms/step Epoch 281/350 283/283 - 3s - loss: 0.3410 - accuracy: 0.9503 - val_loss: 0.5362 - val_accuracy: 0.9000 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 282/350 283/283 - 2s - loss: 0.3208 - accuracy: 0.9536 - val_loss: 0.5267 - val_accuracy: 0.8993 - lr: 1.6777e-04 - 2s/epoch - 9ms/step Epoch 283/350 283/283 - 3s - loss: 0.3067 - accuracy: 0.9554 - val_loss: 0.5290 - val_accuracy: 0.9027 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 284/350 283/283 - 3s - loss: 0.3189 - accuracy: 0.9570 - val_loss: 0.5430 - val_accuracy: 0.8943 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 285/350 283/283 - 3s - loss: 0.3083 - accuracy: 0.9564 - val_loss: 0.5382 - val_accuracy: 0.8913 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 286/350 283/283 - 3s - loss: 0.3119 - accuracy: 0.9561 - val_loss: 0.5208 - val_accuracy: 0.9017 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 287/350 283/283 - 3s - loss: 0.3093 - accuracy: 0.9581 - val_loss: 0.6029 - val_accuracy: 0.8780 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 288/350 283/283 - 3s - loss: 0.3087 - accuracy: 0.9551 - val_loss: 0.4989 - val_accuracy: 0.9037 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 289/350 283/283 - 3s - loss: 0.3133 - accuracy: 0.9559 - val_loss: 0.4988 - val_accuracy: 0.9040 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 290/350 283/283 - 3s - loss: 0.2918 - accuracy: 0.9582 - val_loss: 0.4981 - val_accuracy: 0.9050 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 291/350 283/283 - 3s - loss: 0.2870 - accuracy: 0.9607 - val_loss: 0.4972 - val_accuracy: 0.9033 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 292/350 283/283 - 3s - loss: 0.2971 - accuracy: 0.9560 - val_loss: 0.5034 - val_accuracy: 0.9017 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 293/350 283/283 - 3s - loss: 0.3019 - accuracy: 0.9534 - val_loss: 0.5107 - val_accuracy: 0.8987 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 294/350 283/283 - 3s - loss: 0.2901 - accuracy: 0.9590 - val_loss: 0.5758 - val_accuracy: 0.8810 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 295/350 283/283 - 3s - loss: 0.3010 - accuracy: 0.9559 - val_loss: 0.5007 - val_accuracy: 0.9047 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 296/350 283/283 - 3s - loss: 0.2844 - accuracy: 0.9585 - val_loss: 0.5204 - val_accuracy: 0.8940 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 297/350 283/283 - 3s - loss: 0.2866 - accuracy: 0.9576 - val_loss: 0.5209 - val_accuracy: 0.9017 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 298/350 283/283 - 3s - loss: 0.2855 - accuracy: 0.9598 - val_loss: 0.5152 - val_accuracy: 0.9017 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 299/350 283/283 - 3s - loss: 0.2977 - accuracy: 0.9565 - val_loss: 0.4953 - val_accuracy: 0.9030 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 300/350 283/283 - 3s - loss: 0.2865 - accuracy: 0.9577 - val_loss: 0.5179 - val_accuracy: 0.8970 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 301/350 283/283 - 3s - loss: 0.2862 - accuracy: 0.9554 - val_loss: 0.5032 - val_accuracy: 0.8990 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 302/350 283/283 - 2s - loss: 0.2842 - accuracy: 0.9587 - val_loss: 0.5127 - val_accuracy: 0.9007 - lr: 1.6777e-04 - 2s/epoch - 9ms/step Epoch 303/350 283/283 - 3s - loss: 0.2861 - accuracy: 0.9557 - val_loss: 0.5044 - val_accuracy: 0.8997 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 304/350 283/283 - 3s - loss: 0.2682 - accuracy: 0.9612 - val_loss: 0.5321 - val_accuracy: 0.8983 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 305/350 
283/283 - 3s - loss: 0.2805 - accuracy: 0.9576 - val_loss: 0.4801 - val_accuracy: 0.9067 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 306/350 283/283 - 3s - loss: 0.2849 - accuracy: 0.9560 - val_loss: 0.5031 - val_accuracy: 0.9047 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 307/350 283/283 - 3s - loss: 0.2803 - accuracy: 0.9600 - val_loss: 0.5055 - val_accuracy: 0.9007 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 308/350 283/283 - 3s - loss: 0.2768 - accuracy: 0.9590 - val_loss: 0.5100 - val_accuracy: 0.9023 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 309/350 283/283 - 3s - loss: 0.2770 - accuracy: 0.9571 - val_loss: 0.4886 - val_accuracy: 0.9080 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 310/350 283/283 - 3s - loss: 0.2775 - accuracy: 0.9596 - val_loss: 0.5140 - val_accuracy: 0.9013 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 311/350 283/283 - 3s - loss: 0.2850 - accuracy: 0.9566 - val_loss: 0.5387 - val_accuracy: 0.8933 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 312/350 283/283 - 3s - loss: 0.2865 - accuracy: 0.9558 - val_loss: 0.5325 - val_accuracy: 0.8930 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 313/350 283/283 - 3s - loss: 0.2763 - accuracy: 0.9557 - val_loss: 0.5317 - val_accuracy: 0.8980 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 314/350 283/283 - 3s - loss: 0.2787 - accuracy: 0.9595 - val_loss: 0.4918 - val_accuracy: 0.9050 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 315/350 Epoch 315: ReduceLROnPlateau reducing learning rate to 0.00013421773910522462. 283/283 - 3s - loss: 0.2802 - accuracy: 0.9591 - val_loss: 0.4918 - val_accuracy: 0.9033 - lr: 1.6777e-04 - 3s/epoch - 9ms/step Epoch 316/350 283/283 - 3s - loss: 0.2702 - accuracy: 0.9578 - val_loss: 0.4918 - val_accuracy: 0.9043 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 317/350 283/283 - 3s - loss: 0.2677 - accuracy: 0.9622 - val_loss: 0.5025 - val_accuracy: 0.9007 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 318/350 283/283 - 3s - loss: 0.2692 - accuracy: 0.9611 - val_loss: 0.4972 - val_accuracy: 0.9023 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 319/350 283/283 - 3s - loss: 0.2749 - accuracy: 0.9592 - val_loss: 0.4916 - val_accuracy: 0.9033 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 320/350 283/283 - 3s - loss: 0.2601 - accuracy: 0.9626 - val_loss: 0.4977 - val_accuracy: 0.9017 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 321/350 283/283 - 3s - loss: 0.2663 - accuracy: 0.9619 - val_loss: 0.4845 - val_accuracy: 0.9020 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 322/350 283/283 - 3s - loss: 0.2581 - accuracy: 0.9623 - val_loss: 0.4871 - val_accuracy: 0.9060 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 323/350 283/283 - 3s - loss: 0.2642 - accuracy: 0.9612 - val_loss: 0.4853 - val_accuracy: 0.9033 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 324/350 283/283 - 3s - loss: 0.2664 - accuracy: 0.9595 - val_loss: 0.4758 - val_accuracy: 0.9087 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 325/350 283/283 - 3s - loss: 0.2598 - accuracy: 0.9637 - val_loss: 0.5099 - val_accuracy: 0.9030 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 326/350 283/283 - 3s - loss: 0.2599 - accuracy: 0.9619 - val_loss: 0.5179 - val_accuracy: 0.8937 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 327/350 283/283 - 3s - loss: 0.2577 - accuracy: 0.9601 - val_loss: 0.5056 - val_accuracy: 0.8973 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 328/350 283/283 - 3s - loss: 0.2667 - accuracy: 0.9597 - val_loss: 0.4661 - val_accuracy: 0.9033 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 329/350 283/283 - 3s - loss: 0.2403 - accuracy: 0.9661 - val_loss: 0.4731 - 
val_accuracy: 0.9073 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 330/350 283/283 - 3s - loss: 0.2463 - accuracy: 0.9639 - val_loss: 0.4709 - val_accuracy: 0.9033 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 331/350 283/283 - 3s - loss: 0.2418 - accuracy: 0.9650 - val_loss: 0.4689 - val_accuracy: 0.9063 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 332/350 283/283 - 3s - loss: 0.2447 - accuracy: 0.9637 - val_loss: 0.4874 - val_accuracy: 0.9030 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 333/350 283/283 - 3s - loss: 0.2515 - accuracy: 0.9608 - val_loss: 0.4808 - val_accuracy: 0.9030 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 334/350 283/283 - 3s - loss: 0.2459 - accuracy: 0.9636 - val_loss: 0.5067 - val_accuracy: 0.8947 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 335/350 283/283 - 3s - loss: 0.2440 - accuracy: 0.9652 - val_loss: 0.4701 - val_accuracy: 0.9033 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 336/350 283/283 - 2s - loss: 0.2515 - accuracy: 0.9606 - val_loss: 0.4591 - val_accuracy: 0.9080 - lr: 1.3422e-04 - 2s/epoch - 9ms/step Epoch 337/350 283/283 - 3s - loss: 0.2553 - accuracy: 0.9625 - val_loss: 0.4799 - val_accuracy: 0.9023 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 338/350 283/283 - 3s - loss: 0.2407 - accuracy: 0.9647 - val_loss: 0.4700 - val_accuracy: 0.9037 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 339/350 283/283 - 3s - loss: 0.2452 - accuracy: 0.9597 - val_loss: 0.4669 - val_accuracy: 0.9040 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 340/350 283/283 - 3s - loss: 0.2533 - accuracy: 0.9595 - val_loss: 0.4654 - val_accuracy: 0.9030 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 341/350 283/283 - 3s - loss: 0.2539 - accuracy: 0.9599 - val_loss: 0.4802 - val_accuracy: 0.9047 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 342/350 283/283 - 3s - loss: 0.2416 - accuracy: 0.9634 - val_loss: 0.4625 - val_accuracy: 0.9067 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 343/350 283/283 - 3s - loss: 0.2453 - accuracy: 0.9613 - val_loss: 0.4682 - val_accuracy: 0.9070 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 344/350 283/283 - 3s - loss: 0.2330 - accuracy: 0.9661 - val_loss: 0.4676 - val_accuracy: 0.9067 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 345/350 283/283 - 3s - loss: 0.2338 - accuracy: 0.9641 - val_loss: 0.4593 - val_accuracy: 0.9063 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 346/350 283/283 - 3s - loss: 0.2522 - accuracy: 0.9621 - val_loss: 0.4534 - val_accuracy: 0.9063 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 347/350 283/283 - 3s - loss: 0.2438 - accuracy: 0.9623 - val_loss: 0.4868 - val_accuracy: 0.9000 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 348/350 283/283 - 3s - loss: 0.2538 - accuracy: 0.9610 - val_loss: 0.4613 - val_accuracy: 0.9047 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 349/350 283/283 - 3s - loss: 0.2523 - accuracy: 0.9606 - val_loss: 0.4606 - val_accuracy: 0.9040 - lr: 1.3422e-04 - 3s/epoch - 9ms/step Epoch 350/350 283/283 - 3s - loss: 0.2479 - accuracy: 0.9613 - val_loss: 0.4845 - val_accuracy: 0.8957 - lr: 1.3422e-04 - 3s/epoch - 9ms/step 94/94 - 0s - loss: 0.4845 - accuracy: 0.8957 - 307ms/epoch - 3ms/step Baseline Error: 10.43%
df128
| | Model | Train Accuracy | Validation Accuracy | Train Loss | Validation Loss | History |
|---|---|---|---|---|---|---|
| 0 | Base 128 Model | 0.985490 | 0.769333 | 0.045317 | 1.295857 | <keras.callbacks.History object at 0x7f5a16d74... |
| 1 | LeNet | 1.000000 | 0.704000 | 0.003636 | 1.203718 | <keras.callbacks.History object at 0x7f59ec532... |
| 2 | AlexNet | 0.105782 | 0.066667 | 2.637087 | 2.787983 | <keras.callbacks.History object at 0x7f59ec79e... |
| 3 | RegularizedModel | 0.964444 | 0.754333 | 0.478071 | 1.542403 | <keras.callbacks.History object at 0x7f5942cc5... |
| 4 | ExtraThickModel | 0.835290 | 0.766000 | 0.920840 | 1.247124 | <keras.callbacks.History object at 0x7f59428f2... |
| 5 | BNModel | 0.737483 | 0.373000 | 1.096512 | 3.277027 | <keras.callbacks.History object at 0x7f5960582... |
| 6 | BNlrdModel | 0.844041 | 0.747333 | 0.817036 | 1.183966 | <keras.callbacks.History object at 0x7f594289f... |
| 7 | BNlrdRegModel | 0.818564 | 0.852333 | 0.946453 | 0.859434 | <keras.callbacks.History object at 0x7f5942426... |
| 8 | BNlrdRegModelXtraepochs | 0.980284 | 0.932667 | 0.172018 | 0.384631 | <keras.callbacks.History object at 0x7f59344f9... |
We can see that the model has fully converged and achieves a markedly better loss and accuracy than the other models. We can now save it and treat it as the best model so far.
model_128BNlrdReg.save('Best128Model.h5')
plt.figure(figsize=(15, 8))
for index, row in df128[['Model', 'History']].iterrows():
model_name = row['Model']
history = row['History']
sns.lineplot(x=range(1, len(history.history['val_accuracy']) + 1), y=history.history['val_accuracy'], label=f'{model_name} Validation')
plt.title('128x128Model Accuracies Over Epochs')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
plt.figure(figsize=(15, 8))
for index, row in df128[['Model', 'History']].iterrows():
model_name = row['Model']
history = row['History']
accuracy_diff = [train_acc - val_acc for train_acc, val_acc in zip(history.history['accuracy'], history.history['val_accuracy'])]
sns.lineplot(x=range(1, len(accuracy_diff) + 1), y=accuracy_diff, label=f'{model_name}')
plt.title('Training Accuracy - Validation Accuracy Difference Over Epochs (128x128)')
plt.xlabel('Epochs')
plt.ylabel('Accuracy Difference (To see overfitting)')
plt.legend()
plt.show()
# Convert the test set to 128x128
X_test_resized = tf.image.resize(X_test, (128, 128))
X_test128 = X_test_resized.numpy().reshape(X_test_resized.shape[0], 128, 128, 1).astype('float32') / 255
test_labels = to_categorical(y_test)
#load the saved models weights
model128best = models.load_model('Best128Model.h5')
#evaluate the model on the test set
score = model128best.evaluate(X_test128, test_labels, verbose=0)
print("Test loss:", score[0])
print("Test accuracy:", score[1])
Test loss: 0.46002209186553955 Test accuracy: 0.9110000133514404
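Beyond the aggregate test score, a per-class breakdown shows which of the 15 classes account for the remaining ~9% error. A minimal sketch, assuming the model128best, X_test128 and y_test objects from the cells above (the heatmap simply labels classes by their integer indices):
from sklearn.metrics import classification_report, confusion_matrix
# Predicted class indices for the 128x128 test set
y_pred = np.argmax(model128best.predict(X_test128, verbose=0), axis=1)
# Per-class precision/recall/F1
print(classification_report(y_test, y_pred))
# Row-normalised confusion matrix: each row sums to 1 over the predicted classes
cm = confusion_matrix(y_test, y_pred, normalize='true')
plt.figure(figsize=(10, 8))
sns.heatmap(cm, annot=True, fmt='.2f', cmap='Blues')
plt.xlabel('Predicted class')
plt.ylabel('True class')
plt.title('Best128Model: per-class test performance')
plt.show()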
As with the 31x31 model, we will test the different ways of balancing the data and see whether the results improve or differ.
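One of those balancing options, class weighting, leaves the images untouched and instead re-weights the loss per class. A minimal sketch of how such weights can be derived with scikit-learn, assuming y_train holds the integer training labels built in the next cell; the resulting dictionary is what a fit call would receive via its class_weight argument:
from sklearn.utils.class_weight import compute_class_weight
# Inverse-frequency ("balanced") weights for the 15 classes
classes = np.unique(y_train)
balanced = compute_class_weight(class_weight='balanced', classes=classes, y=y_train)
class_weight_dict = dict(zip(classes, balanced))
print(class_weight_dict)
# Illustrative usage: model.fit(..., class_weight=class_weight_dict)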
# Convert datasets to NumPy arrays
train_images, train_labels_augmented = zip(*[(image, label) for image, label in train_data.as_numpy_iterator()])
#train set
X_train_aug = np.concatenate(train_images, axis=0)
y_train = np.concatenate(train_labels_augmented, axis=0)
# Resize images to (128, 128) using TensorFlow
X_train_resized = tf.image.resize(X_train_aug, (128, 128))
# Reshape and normalize
X_train128_aug = X_train_resized.numpy().reshape(X_train_resized.shape[0], 128, 128, 1).astype('float32') / 255
X_train128_augmented, y_train_augmented = augmentfill(y_train, augmentation_dict, X_train128_aug)
train_labels_augmented = to_categorical(y_train_augmented)
Augmentation log (incremental progress lines trimmed; shown as augmented/target per category): Category 0: 214/214, Category 1: 254/254, Category 2: 535/535, Category 3: 129/129, Category 4: 219/219, Category 5: 454/454, Category 6: 609/609, Category 7: 699/699, Category 8: 374/374, Category 9: 205/205, Category 10: 392/392, Category 11: 582/582, Category 12: 144/144, Category 13: 708/708, Category 14: 0/0
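The augmentfill helper used above was defined earlier in the notebook; as a rough reminder of the idea it implements, the sketch below tops up under-represented classes with ImageDataGenerator (already imported at the top of the notebook) until each reaches a target count. The function name, arguments and augmentation settings here are illustrative only and differ from the actual helper, which also takes the augmentation_dict defined earlier:
def augment_to_target(X, y, target_per_class):
    # Illustrative only: generate augmented copies of under-represented classes
    # until every class holds target_per_class samples.
    datagen = ImageDataGenerator(rotation_range=10, width_shift_range=0.1,
                                 height_shift_range=0.1, zoom_range=0.1)
    X_out, y_out = [X], [y]
    for cls in np.unique(y):
        idx = np.where(y == cls)[0]
        deficit = target_per_class - len(idx)
        if deficit <= 0:
            continue
        # Sample source images from this class (with replacement) and augment them in one batch
        src = X[np.random.choice(idx, size=deficit, replace=True)]
        aug = next(datagen.flow(src, batch_size=deficit, shuffle=False))
        X_out.append(aug)
        y_out.append(np.full(deficit, cls))
    return np.concatenate(X_out), np.concatenate(y_out)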
def Goat128Model():
Goat128Model = models.Sequential([
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.MaxPooling2D((2, 2)),
layers.Conv2D(64, (3, 3), activation='relu'),
layers.Dropout(0.5),
layers.Flatten(),
layers.Dense(64, activation='relu', kernel_regularizer=regularizers.l2(0.001)),
layers.Dropout(0.5),
layers.Dense(32, activation='relu'),
layers.BatchNormalization(),
layers.Dense(15, activation='softmax')
])
return Goat128Model
## Test with just augmented data
model = Goat128Model()
model.compile(optimizer='adam',
loss= 'categorical_crossentropy',
metrics=['accuracy'])
history = model.fit(X_train128_augmented, train_labels_augmented, validation_data=(X_val128, validation_labels),
epochs=150, batch_size=32, verbose=2, #no class weights
callbacks=[ReduceLROnPlateau(monitor='val_loss', factor=0.8, patience=10, verbose=1, mode='auto', min_delta=0.0001, cooldown=0, min_lr=0)])
scores = model.evaluate(X_val128, validation_labels, verbose=2)
print("Baseline Error: %.2f%%" % (100-scores[1]*100))
plot_history(history)
#save the model
model.save('Goat128Model.h5')
Epoch 1/150
2023-11-26 15:32:44.775856: E tensorflow/core/grappler/optimizers/meta_optimizer.cc:954] layout failed: INVALID_ARGUMENT: Size of values 0 does not match size of permutation 4 @ fanin shape insequential_26/dropout_37/dropout/SelectV2-2-TransposeNHWCToNCHW-LayoutOptimizer
448/448 - 5s - loss: 2.6684 - accuracy: 0.1368 - val_loss: 2.4384 - val_accuracy: 0.3227 - lr: 0.0010 - 5s/epoch - 12ms/step
Epochs 2-79: per-epoch log trimmed. ReduceLROnPlateau lowered the learning rate to 8.0000e-04 (epoch 24) and 6.4000e-04 (epoch 57), while validation accuracy rose from ~0.32 to ~0.72.
Epoch 80/150 448/448 - 2s - loss: 1.4711 - accuracy: 0.6413 - val_loss: 1.3289 - val_accuracy: 0.7227 - lr: 6.4000e-04 - 2s/epoch - 5ms/step Epoch 81/150
model = models.load_model('Goat128Model.h5')
#evaluate the model on the test set
score = model.evaluate(X_test128, test_labels, verbose=0)
print("Test loss:", score[0])
print("Test accuracy:", score[1])
We can see that the model trained on the augmented data performs poorly compared to our original model. One thing to note, though, is that it converges faster on the augmented data, albeit to a lower accuracy. To address this, we could tune the augmentation function to augment the data differently; however, as tuning is not part of the assignment, I will not do so.
The final model for 128x128 is therefore the one trained on the original data with class weights, as it has the highest test accuracy and the lowest test loss of all the models.